From f7fedb4d0aec5dc60bf52bb4c460584d08a236ce Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Fri, 29 Mar 2024 04:01:45 +0100 Subject: [PATCH 001/264] Fix downsample persistent task params serialization bwc (#106878) A missing check on the transport version results in an unreadable cluster state if it includes a serialized instance of DownsampleShardTaskParams. #98023 introduced an optional string array holding the dimensions used by time series indices. Reading an optional array requires first reading a boolean that indicates whether the array exists in serialized form. From 8.13 on we try to read that boolean, which is not there because older versions write neither the boolean nor the string array. Here we add the check on the transport version so that, for backward compatibility, we skip reading the boolean and the array whenever they cannot be present. Customers using downsampling might have cluster states that include such serialized objects and would be unable to upgrade to version 8.13. They will be able to upgrade to any version that includes this fix. This fix has a side effect, see #106880. --- docs/changelog/106878.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../downsample/qa/mixed-cluster/build.gradle | 49 ++++++ .../MixedClusterDownsampleRestIT.java | 48 ++++++ .../test/downsample/10_basic.yml | 152 ++++++++++++++++++ .../downsample/DownsampleShardIndexer.java | 9 ++ .../downsample/DownsampleShardTaskParams.java | 19 ++- .../DownsampleShardTaskParamsTests.java | 134 +++++++++++++++ 8 files changed, 413 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/106878.yaml create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/build.gradle create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml create mode 100644 x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java diff --git a/docs/changelog/106878.yaml b/docs/changelog/106878.yaml new file mode 100644 index 0000000000000..585475bb5ea55 --- /dev/null +++ b/docs/changelog/106878.yaml @@ -0,0 +1,5 @@ +pr: 106878 +summary: Gate reading of optional string array for bwc +area: Downsampling +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1be7bd795d1c1..76f4d6c1c0fae 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -133,6 +133,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEX_REQUEST_NORMALIZED_BYTES_PARSED = def(8_593_00_0); public static final TransportVersion INGEST_GRAPH_STRUCTURE_EXCEPTION = def(8_594_00_0); public static final TransportVersion ML_MODEL_IN_SERVICE_SETTINGS = def(8_595_00_0); + // 8.14.0+ public static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0); public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0); public static final TransportVersion DATA_STREAM_AUTO_SHARDING_EVENT = def(8_598_00_0); diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle new file mode 100644 index 0000000000000..2449991a8e1e0 --- /dev/null +++ 
b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.internal-test-artifact' +apply plugin: 'elasticsearch.bwc-test' + + +dependencies { + testImplementation project(path: ':test:test-clusters') + yamlRestTestImplementation project(path: xpackModule('rollup')) +} + +restResources { + restApi { + include '_common', 'bulk', 'cluster', 'indices', 'search', 'ingest.put_pipeline', 'ingest.delete_pipeline' + } +} + +def supportedVersion = bwcVersion -> { + return bwcVersion.onOrAfter("8.8.0"); +} + +BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> + + def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { + usesDefaultDistribution() + usesBwcDistribution(bwcVersion) + systemProperty("tests.old_cluster_version", bwcVersion) + testClassesDirs = sourceSets.yamlRestTest.output.classesDirs + classpath = sourceSets.yamlRestTest.runtimeClasspath + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn yamlRestTest + } +} + +tasks.named("yamlRestTest") { + enabled = false +} diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java new file mode 100644 index 0000000000000..a4765271e7300 --- /dev/null +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.downsample; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +public class MixedClusterDownsampleRestIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .withNode(node -> node.version(getOldVersion())) + .withNode(node -> node.version(Version.CURRENT)) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + static Version getOldVersion() { + return Version.fromString(System.getProperty("tests.old_cluster_version")); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public MixedClusterDownsampleRestIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + +} diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml new file mode 100644 index 0000000000000..265f97e73c234 --- /dev/null +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -0,0 +1,152 @@ +setup: + - skip: + version: " - 8.4.99" + reason: "rollup renamed to downsample in 8.5.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [metricset, k8s.pod.uid] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + created_at: + type: date_nanos + running: + type: boolean + number_of_containers: + type: integer + ip: + type: ip + tags: + type: keyword + values: + type: integer + multi-counter: + type: long + time_series_metric: counter + scaled-counter: + type: scaled_float + scaling_factor: 100 + time_series_metric: counter + multi-gauge: + type: integer + time_series_metric: gauge + scaled-gauge: + type: scaled_float + scaling_factor: 100 + time_series_metric: gauge + network: + properties: + tx: + type: long + time_series_metric: gauge + rx: + type: long + time_series_metric: gauge + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "scaled-counter": 10.0, "multi-gauge": [100, 200, 150], "scaled-gauge": 100.0, "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"index": {}}' + - 
'{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "scaled-counter": 20.0, "multi-gauge": [90, 91, 95], "scaled-gauge": 90.0, "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "scaled-counter": 1.0, "multi-gauge": [103, 110, 109], "scaled-gauge": 104.0, "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "scaled-counter": 100.0, "multi-gauge": [100, 100, 100], "scaled-gauge": 102.0, "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "scaled-counter": 7.0, "multi-gauge": [100, 100, 102], "scaled-gauge": 100.0, "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "scaled-counter": 0.0, "multi-gauge": [101, 102, 102], "scaled-gauge": 101.0, "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "scaled-counter": 1000.0, "multi-gauge": [99, 100, 110], "scaled-gauge": 99.0, "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "scaled-counter": 70.0, "multi-gauge": [95, 98, 100], "scaled-gauge": 95.0, "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + +--- +"Downsample index": + + - do: + 
indices.downsample: + index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + sort: [ "@timestamp" ] + + - length: { hits.hits: 4 } + - match: { hits.hits.0._source._doc_count: 2 } + - match: { hits.hits.0._source.metricset: pod } + + # Assert rollup index settings + - do: + indices.get_settings: + index: test-downsample + + - match: { test-downsample.settings.index.mode: time_series } + - match: { test-downsample.settings.index.time_series.end_time: 2021-04-29T00:00:00Z } + - match: { test-downsample.settings.index.time_series.start_time: 2021-04-28T00:00:00Z } + - match: { test-downsample.settings.index.routing_path: [ "metricset", "k8s.pod.uid"] } + - match: { test-downsample.settings.index.downsample.source.name: test } + + # Assert rollup index mapping + - do: + indices.get_mapping: + index: test-downsample + + - match: { test-downsample.mappings.properties.@timestamp.type: date } + - match: { test-downsample.mappings.properties.@timestamp.meta.fixed_interval: 1h } + - match: { test-downsample.mappings.properties.@timestamp.meta.time_zone: UTC } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 59c1c9c38efae..72d4b934ecdda 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -564,6 +564,15 @@ public XContentBuilder buildDownsampleDocument() throws IOException { 
fieldProducer.write(builder); } + if (dimensions.length == 0) { + logger.debug("extracting dimensions from legacy tsid"); + Map dimensions = (Map) DocValueFormat.TIME_SERIES_ID.format(tsid); + for (Map.Entry e : dimensions.entrySet()) { + assert e.getValue() != null; + builder.field((String) e.getKey(), e.getValue()); + } + } + builder.endObject(); return builder; } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java index 4ccc913b974d6..93be79e859f8d 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.downsample.DownsampleConfig; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -36,6 +37,7 @@ public record DownsampleShardTaskParams( String[] dimensions ) implements PersistentTaskParams { + private static final TransportVersion V_8_13_0 = TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS; public static final String NAME = DownsampleShardTask.TASK_NAME; private static final ParseField DOWNSAMPLE_CONFIG = new ParseField("downsample_config"); private static final ParseField DOWNSAMPLE_INDEX = new ParseField("rollup_index"); @@ -71,7 +73,7 @@ public record DownsampleShardTaskParams( new ShardId(in), in.readStringArray(), in.readStringArray(), - in.readOptionalStringArray() + in.getTransportVersion().onOrAfter(V_8_13_0) ? 
in.readOptionalStringArray() : new String[] {} ); } @@ -85,7 +87,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(SHARD_ID.getPreferredName(), shardId); builder.array(METRICS.getPreferredName(), metrics); builder.array(LABELS.getPreferredName(), labels); - builder.array(DIMENSIONS.getPreferredName(), dimensions); + if (dimensions.length > 0) { + builder.array(DIMENSIONS.getPreferredName(), dimensions); + } return builder.endObject(); } @@ -108,7 +112,9 @@ public void writeTo(StreamOutput out) throws IOException { shardId.writeTo(out); out.writeStringArray(metrics); out.writeStringArray(labels); - out.writeOptionalStringArray(dimensions); + if (out.getTransportVersion().onOrAfter(V_8_13_0)) { + out.writeOptionalStringArray(dimensions); + } } public static DownsampleShardTaskParams fromXContent(XContentParser parser) throws IOException { @@ -157,7 +163,7 @@ public static class Builder { ShardId shardId; String[] metrics; String[] labels; - String[] dimensions; + String[] dimensions = Strings.EMPTY_ARRAY; public Builder downsampleConfig(final DownsampleConfig downsampleConfig) { this.downsampleConfig = downsampleConfig; @@ -212,4 +218,9 @@ public DownsampleShardTaskParams build() { ); } } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java new file mode 100644 index 0000000000000..7d79ae720271e --- /dev/null +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import org.elasticsearch.action.downsample.DownsampleConfig; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class DownsampleShardTaskParamsTests extends AbstractXContentSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return DownsampleShardTaskParams::new; + } + + @Override + protected DownsampleShardTaskParams createTestInstance() { + long startTime = randomLongBetween(100000, 200000); + long endTime = startTime + randomLongBetween(1000, 10_000); + String[] dimensions = randomBoolean() ? 
generateRandomStringArray(5, 5, false, true) : new String[] {}; + return new DownsampleShardTaskParams( + new DownsampleConfig(randomFrom(DateHistogramInterval.HOUR, DateHistogramInterval.DAY)), + randomAlphaOfLength(5), + startTime, + endTime, + new ShardId(new Index(randomAlphaOfLength(5), "n/a"), between(0, 5)), + generateRandomStringArray(5, 5, false, false), + generateRandomStringArray(5, 5, false, false), + dimensions + ); + } + + @Override + protected DownsampleShardTaskParams mutateInstance(DownsampleShardTaskParams in) throws IOException { + return switch (between(0, 7)) { + case 0 -> new DownsampleShardTaskParams( + new DownsampleConfig(randomFrom(DateHistogramInterval.WEEK, DateHistogramInterval.MONTH)), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 1 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + randomAlphaOfLength(6), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 2 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis() + between(1, 100), + in.indexEndTimeMillis() + between(1, 100), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 3 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis() + between(10, 100), + new ShardId(new Index(randomAlphaOfLength(6), "n/a"), between(0, 5)), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 4 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis() + between(10, 100), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 5 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + generateRandomStringArray(6, 6, false, false), + in.labels(), + in.dimensions() + ); + case 6 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + generateRandomStringArray(6, 6, false, false), + in.dimensions() + ); + case 7 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + generateRandomStringArray(6, 6, false, false) + ); + default -> throw new AssertionError("unknown option"); + }; + } + + @Override + protected DownsampleShardTaskParams doParseInstance(XContentParser parser) throws IOException { + return DownsampleShardTaskParams.fromXContent(parser); + } +} From b4938e16457dc69d392235eaf404a6dad9ddb717 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 29 Mar 2024 09:24:52 +0200 Subject: [PATCH 002/264] Query API Key Information API support for the `typed_keys` request parameter (#106873) The typed_keys request parameter is the canonical parameter, also used by the regular index _search endpoint, to return the types of aggregations in the response. This is required by typed language clients of the _security/_query/api_key endpoint that are using aggregations. 
Closes #106817 --- docs/changelog/106873.yaml | 6 + .../rest-api/security/query-api-key.asciidoc | 4 + docs/reference/search/search.asciidoc | 4 +- .../api/security.query_api_keys.json | 5 + .../xpack/security/ApiKeyAggsIT.java | 136 +++++++++++------- .../action/apikey/RestQueryApiKeyAction.java | 7 + 6 files changed, 105 insertions(+), 57 deletions(-) create mode 100644 docs/changelog/106873.yaml diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml new file mode 100644 index 0000000000000..f823caff7aefe --- /dev/null +++ b/docs/changelog/106873.yaml @@ -0,0 +1,6 @@ +pr: 106873 +summary: Query API Key Information API support for the `typed_keys` request parameter +area: Security +type: enhancement +issues: + - 106817 diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index f2969aa43d57b..1888a110e072f 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -159,6 +159,10 @@ its <> and the owner user's If it exists, the profile uid is returned under the `profile_uid` response field for each API key. Defaults to `false`. +`typed_keys`:: +(Optional, Boolean) If `true`, aggregation names are prefixed by their respective types in the response. +Defaults to `false`. + [[security-api-query-api-key-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 074c950d3e987..f602b6457c31e 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -341,8 +341,8 @@ If `true`, the exact number of hits is returned at the cost of some performance. If `false`, the response does not include the total number of hits matching the query. `typed_keys`:: -(Optional, Boolean) If `true`, aggregation and suggester names are be prefixed -by their respective types in the response. Defaults to `true`. +(Optional, Boolean) If `true`, aggregation and suggester names are prefixed +by their respective types in the response. Defaults to `false`. 
`version`:: (Optional, Boolean) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json index 1127f79294910..de95f76ad49db 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json @@ -31,6 +31,11 @@ "type":"boolean", "default":false, "description": "flag to also retrieve the API Key's owner profile uid, if it exists" + }, + "typed_keys":{ + "type":"boolean", + "default":false, + "description": "flag to prefix aggregation names by their respective types in the response" } }, "body":{ diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java index f4fa304f9c1e2..427d918fd64d5 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java @@ -65,7 +65,8 @@ public void testFiltersAggs() throws IOException { ), API_KEY_USER_AUTH_HEADER ); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "aggs": { "hostnames": { @@ -79,22 +80,23 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) ((Map) aggs.get("hostnames")).get("buckets")).size(), is(2)); + String aggName = typedAggs ? "filters#hostnames" : "hostnames"; + assertThat(((Map) ((Map) aggs.get(aggName)).get("buckets")).size(), is(2)); assertThat( - ((Map) ((Map) ((Map) aggs.get("hostnames")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "my-org-host-1" )).get("doc_count"), is(2) ); assertThat( - ((Map) ((Map) ((Map) aggs.get("hostnames")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "my-org-host-2" )).get("doc_count"), is(2) ); }); // other bucket - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "only_user_keys": { @@ -108,22 +110,23 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).size(), is(2)); + String aggName = typedAggs ? "filters#only_user_keys" : "only_user_keys"; + assertThat(((Map) ((Map) aggs.get(aggName)).get("buckets")).size(), is(2)); assertThat( - ((Map) ((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "only_key4_match" )).get("doc_count"), is(1) ); assertThat( - ((Map) ((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "other_user_keys" )).get("doc_count"), is(1) ); }); // anonymous filters - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "all_user_keys": { @@ -139,27 +142,28 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).size(), is(4)); + String aggName = typedAggs ? 
"filters#all_user_keys" : "all_user_keys"; + assertThat(((List>) ((Map) aggs.get(aggName)).get("buckets")).size(), is(4)); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(0).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(0).get("doc_count"), is(2) ); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(1).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(1).get("doc_count"), is(2) ); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(2).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(2).get("doc_count"), is(2) ); // the "other" bucket assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(3).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(3).get("doc_count"), is(0) ); }); // nested filters - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "level1": { @@ -184,36 +188,44 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - List> level1Buckets = (List>) ((Map) aggs.get("level1")).get("buckets"); + String level1AggName = typedAggs ? "filters#level1" : "level1"; + List> level1Buckets = (List>) ((Map) aggs.get(level1AggName)).get( + "buckets" + ); assertThat(level1Buckets.size(), is(2)); assertThat(level1Buckets.get(0).get("doc_count"), is(2)); assertThat(level1Buckets.get(0).get("key"), is("rest-filter")); + String level2AggName = typedAggs ? "filters#level2" : "level2"; assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(0).get("level2")).get("buckets")) - .get("invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(0).get(level2AggName)).get( + "buckets" + )).get("invalidated")).get("doc_count"), is(0) ); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(0).get("level2")).get("buckets")) - .get("not-invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(0).get(level2AggName)).get( + "buckets" + )).get("not-invalidated")).get("doc_count"), is(2) ); assertThat(level1Buckets.get(1).get("doc_count"), is(2)); assertThat(level1Buckets.get(1).get("key"), is("user-filter")); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(1).get("level2")).get("buckets")) - .get("invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(1).get(level2AggName)).get( + "buckets" + )).get("invalidated")).get("doc_count"), is(0) ); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(1).get("level2")).get("buckets")) - .get("not-invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(1).get(level2AggName)).get( + "buckets" + )).get("not-invalidated")).get("doc_count"), is(2) ); }); // filter on disallowed fields { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -240,7 +252,7 @@ public void testFiltersAggs() throws IOException { ); } { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? 
"?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -310,7 +322,8 @@ public void testAggsForType() throws IOException { updateApiKeys(systemWriteCreds, "ctx._source['type']='cross_cluster';", crossApiKeyIds); boolean isAdmin = randomBoolean(); - assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -324,9 +337,8 @@ public void testAggsForType() throws IOException { } } """, aggs -> { - List> buckets = (List>) ((Map) aggs.get("all_keys_by_type")).get( - "buckets" - ); + String aggName = typedAggs ? "composite#all_keys_by_type" : "all_keys_by_type"; + List> buckets = (List>) ((Map) aggs.get(aggName)).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(((Map) buckets.get(0).get("key")).get("type"), is("cross_cluster")); assertThat(((Map) buckets.get(1).get("key")).get("type"), is("other")); @@ -342,7 +354,7 @@ public void testAggsForType() throws IOException { } }); - assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, """ + assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -371,23 +383,23 @@ public void testAggsForType() throws IOException { """, aggs -> { assertThat(aggs.size(), is(4)); // 3 types - assertThat(((Map) aggs.get("type_cardinality")).get("value"), is(3)); + assertThat(((Map) aggs.get((typedAggs ? "cardinality#" : "") + "type_cardinality")).get("value"), is(3)); if (isAdmin) { // 8 keys - assertThat(((Map) aggs.get("type_value_count")).get("value"), is(8)); + assertThat(((Map) aggs.get((typedAggs ? "value_count#" : "") + "type_value_count")).get("value"), is(8)); } else { // 4 keys - assertThat(((Map) aggs.get("type_value_count")).get("value"), is(4)); + assertThat(((Map) aggs.get((typedAggs ? "value_count#" : "") + "type_value_count")).get("value"), is(4)); } - assertThat(((Map) aggs.get("missing_type_count")).get("doc_count"), is(0)); - List> typeTermsBuckets = (List>) ((Map) aggs.get("type_terms")).get( - "buckets" - ); + assertThat(((Map) aggs.get((typedAggs ? "missing#" : "") + "missing_type_count")).get("doc_count"), is(0)); + List> typeTermsBuckets = (List>) ((Map) aggs.get( + (typedAggs ? "sterms#" : "") + "type_terms" + )).get("buckets"); assertThat(typeTermsBuckets.size(), is(3)); }); // runtime type field is disallowed { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (typedAggs ? "?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -432,7 +444,8 @@ public void testFilterAggs() throws IOException { invalidateApiKey(key2User1KeyId, false, API_KEY_ADMIN_AUTH_HEADER); invalidateApiKey(key1User3KeyId, false, API_KEY_ADMIN_AUTH_HEADER); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -451,10 +464,11 @@ public void testFilterAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) aggs.get("not_invalidated")).get("doc_count"), is(4)); // 6 - 2 (invalidated) + // 6 - 2 (invalidated) + assertThat(((Map) aggs.get(typedAggs ? "filter#not_invalidated" : "not_invalidated")).get("doc_count"), is(4)); List> buckets = (List>) ((Map) ((Map) aggs.get( - "not_invalidated" - )).get("keys_by_username")).get("buckets"); + typedAggs ? 
"filter#not_invalidated" : "not_invalidated" + )).get(typedAggs ? "composite#keys_by_username" : "keys_by_username")).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(((Map) buckets.get(0).get("key")).get("usernames"), is("test-user-1")); assertThat(buckets.get(0).get("doc_count"), is(1)); @@ -464,7 +478,7 @@ public void testFilterAggs() throws IOException { assertThat(buckets.get(2).get("doc_count"), is(1)); }); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "aggs": { "keys_by_username": { @@ -488,23 +502,32 @@ public void testFilterAggs() throws IOException { } } """, aggs -> { - List> buckets = (List>) ((Map) aggs.get("keys_by_username")).get( - "buckets" - ); + List> buckets = (List>) ((Map) aggs.get( + typedAggs ? "composite#keys_by_username" : "keys_by_username" + )).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(buckets.get(0).get("doc_count"), is(2)); assertThat(((Map) buckets.get(0).get("key")).get("usernames"), is("test-user-1")); - assertThat(((Map) buckets.get(0).get("not_expired")).get("doc_count"), is(0)); + assertThat( + ((Map) buckets.get(0).get(typedAggs ? "filter#not_expired" : "not_expired")).get("doc_count"), + is(0) + ); assertThat(buckets.get(1).get("doc_count"), is(2)); assertThat(((Map) buckets.get(1).get("key")).get("usernames"), is("test-user-2")); - assertThat(((Map) buckets.get(1).get("not_expired")).get("doc_count"), is(1)); + assertThat( + ((Map) buckets.get(1).get(typedAggs ? "filter#not_expired" : "not_expired")).get("doc_count"), + is(1) + ); assertThat(buckets.get(2).get("doc_count"), is(2)); assertThat(((Map) buckets.get(2).get("key")).get("usernames"), is("test-user-3")); - assertThat(((Map) buckets.get(2).get("not_expired")).get("doc_count"), is(2)); + assertThat( + ((Map) buckets.get(2).get(typedAggs ? "filter#not_expired" : "not_expired")).get("doc_count"), + is(2) + ); }); // "creator" field is disallowed { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (typedAggs ? "?typed_keys" : "?typed_keys=false")); request.setOptions( request.getOptions() .toBuilder() @@ -533,7 +556,7 @@ public void testFilterAggs() throws IOException { public void testDisallowedAggTypes() { // global aggregation type MUST never be allowed in order to not expose non-owned non-API key docs { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys=true" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -559,7 +582,7 @@ public void testDisallowedAggTypes() { } // pipeline aggs are not allowed but could be if there's an identified use-case { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys=true" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -587,8 +610,11 @@ public void testDisallowedAggTypes() { } } - void assertAggs(String authHeader, String body, Consumer> aggsVerifier) throws IOException { - final Request request = new Request("GET", "/_security/_query/api_key"); + void assertAggs(String authHeader, boolean typedAggs, String body, Consumer> aggsVerifier) throws IOException { + final Request request = new Request( + "GET", + "/_security/_query/api_key" + (typedAggs ? 
randomFrom("?typed_keys", "?typed_keys=true") : randomFrom("", "?typed_keys=false")) + ); request.setJsonEntity(body); request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader)); final Response response = client().performRequest(request); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java index 7e7768212719b..77c2a080dbb57 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -29,6 +30,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -99,6 +101,11 @@ public String getName() { return "xpack_security_query_api_key"; } + @Override + protected Set responseParams() { + return Set.of(RestSearchAction.TYPED_KEYS_PARAM); + } + @Override protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean withLimitedBy = request.paramAsBoolean("with_limited_by", false); From 3d53707adc87e8816e7b7e9fb6b1ce075d00e837 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Fri, 29 Mar 2024 22:53:20 +1100 Subject: [PATCH 003/264] [Doc] Remove invalid watcher ssl enabled settings (#106901) In #99115 we updated the ssl settings template to cover new features. It inadvertently introduced a doc bug that show invalied ssl.enabled settings for watcher HTTP and email. This PR removes them. --- docs/reference/settings/notification-settings.asciidoc | 2 ++ docs/reference/settings/ssl-settings.asciidoc | 2 ++ 2 files changed, 4 insertions(+) diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc index af6636606630a..4a48c26974084 100644 --- a/docs/reference/settings/notification-settings.asciidoc +++ b/docs/reference/settings/notification-settings.asciidoc @@ -95,6 +95,7 @@ corresponding endpoints are explicitly allowed as well. :verifies: :server!: :ssl-context: watcher +:no-enabled-setting: include::ssl-settings.asciidoc[] @@ -284,6 +285,7 @@ Defaults to `Warning: The attachment [%s] contains characters which spreadsheet :verifies: :server!: :ssl-context: watcher-email +:no-enabled-setting: include::ssl-settings.asciidoc[] diff --git a/docs/reference/settings/ssl-settings.asciidoc b/docs/reference/settings/ssl-settings.asciidoc index 2c0eef7077f4d..2ab32c1a65c2d 100644 --- a/docs/reference/settings/ssl-settings.asciidoc +++ b/docs/reference/settings/ssl-settings.asciidoc @@ -1,6 +1,7 @@ ==== {component} TLS/SSL settings You can configure the following TLS/SSL settings. 
+ifndef::no-enabled-setting[] +{ssl-prefix}.ssl.enabled+:: (<>) Used to enable or disable TLS/SSL on the {ssl-layer}. @@ -10,6 +11,7 @@ endif::enabled-by-default[] ifndef::enabled-by-default[] The default is `false`. endif::enabled-by-default[] +endif::no-enabled-setting[] +{ssl-prefix}.ssl.supported_protocols+:: (<>) From 4352791daa4080c2f17275e3429ef833da031e93 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Fri, 29 Mar 2024 14:19:11 +0200 Subject: [PATCH 004/264] Add rolling upgrade test for persistent health node task (#106902) --- .../upgrades/HealthNodeUpgradeIT.java | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java new file mode 100644 index 0000000000000..184343349d317 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.hamcrest.Matchers; + +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { + + public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testHealthNode() throws Exception { + if (clusterHasFeature("health.supports_health")) { + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_cat/tasks")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("health-node")); + }); + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_health_report")); + Map health_report = entityAsMap(response.getEntity()); + assertThat(health_report.get("status"), equalTo("green")); + }); + } + } +} From e59dd0b60efd865f3e182d4da340c38006cf764e Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Fri, 29 Mar 2024 08:40:37 -0600 Subject: [PATCH 005/264] Add total size in bytes to doc stats (#106840) --- docs/changelog/106840.yaml | 6 ++++++ docs/reference/cluster/stats.asciidoc | 7 ++++++- docs/reference/rest-api/common-parms.asciidoc | 2 +- .../main/java/org/elasticsearch/index/shard/DocsStats.java | 2 ++ .../collector/cluster/ClusterStatsMonitoringDocTests.java | 3 ++- 5 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/106840.yaml diff --git a/docs/changelog/106840.yaml b/docs/changelog/106840.yaml new file mode 100644 index 0000000000000..3f6831e4907ca --- /dev/null +++ b/docs/changelog/106840.yaml @@ -0,0 +1,6 @@ +pr: 106840 +summary: Add total size in bytes to doc stats +area: Stats +type: enhancement +issues: + - 97670 diff --git 
a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index dad6b46686ea2..6d8a8f748fa0e 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -223,6 +223,10 @@ selected nodes. + This number is based on documents in Lucene segments. {es} reclaims the disk space of deleted Lucene documents when a segment is merged. + +`total_size_in_bytes`:: +(integer) +Total size in bytes across all primary shards assigned to selected nodes. ===== `store`:: @@ -1594,7 +1598,8 @@ The API returns the following response: }, "docs": { "count": 10, - "deleted": 0 + "deleted": 0, + "total_size_in_bytes": 8833 }, "store": { "size": "16.2kb", diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 4d71634f38acf..d0fd4087c275c 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -508,7 +508,7 @@ Return all statistics. <> statistics. `docs`:: -Number of documents and deleted docs, which have not yet merged out. +Number of documents, number of deleted docs which have not yet merged out, and total size in bytes. <> can affect this statistic. `fielddata`:: diff --git a/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java b/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java index 0d8fc52cddacf..20a7ffe9c7433 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java +++ b/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java @@ -81,6 +81,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.DOCS); builder.field(Fields.COUNT, count); builder.field(Fields.DELETED, deleted); + builder.field(Fields.TOTAL_SIZE_IN_BYTES, totalSizeInBytes); builder.endObject(); return builder; } @@ -102,5 +103,6 @@ static final class Fields { static final String DOCS = "docs"; static final String COUNT = "count"; static final String DELETED = "deleted"; + static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes"; } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index d88adea7aaef3..cb270c7f19ae8 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -522,7 +522,8 @@ public void testToXContent() throws IOException { }, "docs": { "count": 0, - "deleted": 0 + "deleted": 0, + "total_size_in_bytes": 0 }, "store": { "size_in_bytes": 0, From 1c35baa603e130e76fe17156ffa6558750df8bcc Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 29 Mar 2024 11:52:16 -0400 Subject: [PATCH 006/264] Slightly better geoip databaseType validation (#106889) --- docs/changelog/106889.yaml | 5 +++ .../ingest/geoip/GeoIpProcessor.java | 27 ++++++++------ .../geoip/GeoIpProcessorFactoryTests.java | 36 +++++++++++++++++++ 3 files changed, 58 insertions(+), 10 deletions(-) create mode 100644 docs/changelog/106889.yaml diff --git a/docs/changelog/106889.yaml b/docs/changelog/106889.yaml new file mode 100644 index 0000000000000..7755081d09036 --- /dev/null +++ b/docs/changelog/106889.yaml @@ -0,0 +1,5 @@ +pr: 106889 +summary: 
Slightly better geoip `databaseType` validation +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 6a5fb9007377b..087f0ac9480f8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -175,10 +175,7 @@ private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) t } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); } else { - throw new ElasticsearchParseException( - "Unsupported database type [" + geoIpDatabase.getDatabaseType() + "]", - new IllegalStateException() - ); + throw new ElasticsearchParseException("Unsupported database type [" + databaseType + "]", new IllegalStateException()); } return geoData; } @@ -440,12 +437,24 @@ public Processor create( // pipeline. return new DatabaseUnavailableProcessor(processorTag, description, databaseFile); } + final String databaseType; try { databaseType = geoIpDatabase.getDatabaseType(); } finally { geoIpDatabase.release(); } + if (databaseType == null + || (databaseType.endsWith(CITY_DB_SUFFIX) + || databaseType.endsWith(COUNTRY_DB_SUFFIX) + || databaseType.endsWith(ASN_DB_SUFFIX)) == false) { + throw newConfigurationException( + TYPE, + processorTag, + "database_file", + "Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]" + ); + } final Set properties; if (propertyNames != null) { @@ -466,12 +475,8 @@ public Processor create( } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { properties = DEFAULT_ASN_PROPERTIES; } else { - throw newConfigurationException( - TYPE, - processorTag, - "database_file", - "Unsupported database type [" + databaseType + "]" - ); + assert false : "unsupported database type [" + databaseType + "]"; + properties = Set.of(); } } return new GeoIpProcessor( @@ -545,6 +550,8 @@ public static Property parseProperty(String databaseType, String value) { validProperties = ALL_COUNTRY_PROPERTIES; } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { validProperties = ALL_ASN_PROPERTIES; + } else { + assert false : "unsupported database type [" + databaseType + "]"; } try { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index dee9ba3189c26..20e0fa9be6c06 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -50,6 +50,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -287,6 +288,41 @@ public void testBuildIllegalFieldOption() throws Exception { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); } + public void testBuildUnsupportedDatabase() throws Exception { + // mock up some unsupported database (it has a databaseType that we don't recognize) + GeoIpDatabase database = mock(GeoIpDatabase.class); + 
when(database.getDatabaseType()).thenReturn("some-unsupported-database"); + GeoIpDatabaseProvider provider = mock(GeoIpDatabaseProvider.class); + when(provider.getDatabase(anyString())).thenReturn(database); + + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(provider); + + Map config1 = new HashMap<>(); + config1.put("field", "_field"); + config1.put("properties", List.of("ip")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1)); + assertThat( + e.getMessage(), + equalTo("[database_file] Unsupported database type [some-unsupported-database] for file [GeoLite2-City.mmdb]") + ); + } + + public void testBuildNullDatabase() throws Exception { + // mock up a provider that returns a null databaseType + GeoIpDatabase database = mock(GeoIpDatabase.class); + when(database.getDatabaseType()).thenReturn(null); + GeoIpDatabaseProvider provider = mock(GeoIpDatabaseProvider.class); + when(provider.getDatabase(anyString())).thenReturn(database); + + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(provider); + + Map config1 = new HashMap<>(); + config1.put("field", "_field"); + config1.put("properties", List.of("ip")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1)); + assertThat(e.getMessage(), equalTo("[database_file] Unsupported database type [null] for file [GeoLite2-City.mmdb]")); + } + @SuppressWarnings("HiddenField") public void testLazyLoading() throws Exception { + final Path configDir = createTempDir(); From 00b0c54a7482d13a6b062dc77d92615133cf4364 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 29 Mar 2024 12:24:31 -0400 Subject: [PATCH 007/264] ESQL: Generate docs for the trig functions (#106891) This updates the in-code docs on the trig functions to line up with the hand-written docs, removes the hand-written docs, and uses the now mostly identical generated docs. This means we only need to document these functions in one place - right next to the code. 
--- docs/reference/esql/functions/cos.asciidoc | 31 ------------------- .../esql/functions/description/cos.asciidoc | 2 +- .../esql/functions/description/cosh.asciidoc | 2 +- .../esql/functions/description/sin.asciidoc | 2 +- .../esql/functions/description/sinh.asciidoc | 2 +- .../esql/functions/description/tan.asciidoc | 2 +- .../esql/functions/description/tanh.asciidoc | 2 +- .../esql/functions/examples/cos.asciidoc | 13 ++++++++ .../esql/functions/examples/cosh.asciidoc | 13 ++++++++ .../esql/functions/examples/sin.asciidoc | 13 ++++++++ .../esql/functions/examples/sinh.asciidoc | 13 ++++++++ .../esql/functions/examples/tan.asciidoc | 13 ++++++++ .../esql/functions/examples/tanh.asciidoc | 13 ++++++++ .../esql/functions/layout/cos.asciidoc | 1 + .../esql/functions/layout/cosh.asciidoc | 1 + .../esql/functions/layout/sin.asciidoc | 1 + .../esql/functions/layout/sinh.asciidoc | 1 + .../esql/functions/layout/tan.asciidoc | 1 + .../esql/functions/layout/tanh.asciidoc | 1 + .../esql/functions/math-functions.asciidoc | 12 +++---- .../esql/functions/parameters/abs.asciidoc | 2 ++ .../esql/functions/parameters/acos.asciidoc | 2 ++ .../esql/functions/parameters/asin.asciidoc | 2 ++ .../esql/functions/parameters/atan.asciidoc | 2 ++ .../esql/functions/parameters/atan2.asciidoc | 2 ++ .../functions/parameters/auto_bucket.asciidoc | 2 ++ .../esql/functions/parameters/case.asciidoc | 2 ++ .../esql/functions/parameters/ceil.asciidoc | 2 ++ .../functions/parameters/coalesce.asciidoc | 2 ++ .../esql/functions/parameters/concat.asciidoc | 2 ++ .../esql/functions/parameters/cos.asciidoc | 6 ++-- .../esql/functions/parameters/cosh.asciidoc | 6 ++-- .../functions/parameters/date_diff.asciidoc | 2 ++ .../parameters/date_extract.asciidoc | 2 ++ .../functions/parameters/date_format.asciidoc | 2 ++ .../functions/parameters/date_parse.asciidoc | 2 ++ .../esql/functions/parameters/e.asciidoc | 2 ++ .../functions/parameters/ends_with.asciidoc | 2 ++ .../esql/functions/parameters/floor.asciidoc | 2 ++ .../functions/parameters/greatest.asciidoc | 2 ++ .../esql/functions/parameters/least.asciidoc | 2 ++ .../esql/functions/parameters/left.asciidoc | 2 ++ .../esql/functions/parameters/length.asciidoc | 2 ++ .../esql/functions/parameters/log.asciidoc | 2 ++ .../esql/functions/parameters/log10.asciidoc | 2 ++ .../esql/functions/parameters/ltrim.asciidoc | 2 ++ .../esql/functions/parameters/mv_avg.asciidoc | 2 ++ .../functions/parameters/mv_concat.asciidoc | 2 ++ .../functions/parameters/mv_count.asciidoc | 2 ++ .../functions/parameters/mv_dedupe.asciidoc | 2 ++ .../functions/parameters/mv_first.asciidoc | 2 ++ .../functions/parameters/mv_last.asciidoc | 2 ++ .../esql/functions/parameters/mv_max.asciidoc | 2 ++ .../functions/parameters/mv_median.asciidoc | 2 ++ .../esql/functions/parameters/mv_min.asciidoc | 2 ++ .../functions/parameters/mv_slice.asciidoc | 2 ++ .../functions/parameters/mv_sort.asciidoc | 2 ++ .../esql/functions/parameters/mv_sum.asciidoc | 2 ++ .../esql/functions/parameters/mv_zip.asciidoc | 2 ++ .../esql/functions/parameters/pi.asciidoc | 2 ++ .../esql/functions/parameters/pow.asciidoc | 2 ++ .../functions/parameters/replace.asciidoc | 2 ++ .../esql/functions/parameters/right.asciidoc | 2 ++ .../esql/functions/parameters/round.asciidoc | 2 ++ .../esql/functions/parameters/rtrim.asciidoc | 2 ++ .../esql/functions/parameters/sin.asciidoc | 6 ++-- .../esql/functions/parameters/sinh.asciidoc | 6 ++-- .../esql/functions/parameters/split.asciidoc | 2 ++ .../esql/functions/parameters/sqrt.asciidoc | 2 ++ 
.../parameters/st_intersects.asciidoc | 2 ++ .../esql/functions/parameters/st_x.asciidoc | 2 ++ .../esql/functions/parameters/st_y.asciidoc | 2 ++ .../functions/parameters/starts_with.asciidoc | 2 ++ .../functions/parameters/substring.asciidoc | 2 ++ .../esql/functions/parameters/tan.asciidoc | 6 ++-- .../esql/functions/parameters/tanh.asciidoc | 6 ++-- .../esql/functions/parameters/tau.asciidoc | 2 ++ .../functions/parameters/to_boolean.asciidoc | 2 ++ .../parameters/to_cartesianpoint.asciidoc | 2 ++ .../parameters/to_cartesianshape.asciidoc | 2 ++ .../functions/parameters/to_datetime.asciidoc | 2 ++ .../functions/parameters/to_degrees.asciidoc | 2 ++ .../functions/parameters/to_double.asciidoc | 2 ++ .../functions/parameters/to_geopoint.asciidoc | 2 ++ .../functions/parameters/to_geoshape.asciidoc | 2 ++ .../functions/parameters/to_integer.asciidoc | 2 ++ .../esql/functions/parameters/to_ip.asciidoc | 2 ++ .../functions/parameters/to_long.asciidoc | 2 ++ .../functions/parameters/to_lower.asciidoc | 2 ++ .../functions/parameters/to_radians.asciidoc | 2 ++ .../functions/parameters/to_string.asciidoc | 2 ++ .../parameters/to_unsigned_long.asciidoc | 2 ++ .../functions/parameters/to_upper.asciidoc | 2 ++ .../functions/parameters/to_version.asciidoc | 2 ++ .../esql/functions/parameters/trim.asciidoc | 2 ++ .../esql/functions/signature/cos.svg | 2 +- .../esql/functions/signature/cosh.svg | 2 +- .../esql/functions/signature/sin.svg | 2 +- .../esql/functions/signature/sinh.svg | 2 +- .../esql/functions/signature/tan.svg | 2 +- .../esql/functions/signature/tanh.svg | 2 +- docs/reference/esql/functions/sin.asciidoc | 31 ------------------- docs/reference/esql/functions/sinh.asciidoc | 30 ------------------ docs/reference/esql/functions/tan.asciidoc | 31 ------------------- docs/reference/esql/functions/tanh.asciidoc | 30 ------------------ .../esql/functions/types/cos.asciidoc | 2 +- .../esql/functions/types/cosh.asciidoc | 2 +- .../esql/functions/types/sin.asciidoc | 2 +- .../esql/functions/types/sinh.asciidoc | 2 +- .../esql/functions/types/tan.asciidoc | 2 +- .../esql/functions/types/tanh.asciidoc | 2 +- x-pack/plugin/esql/build.gradle | 2 +- .../src/main/resources/meta.csv-spec | 30 +++++++++--------- .../expression/function/scalar/math/Cos.java | 15 +++++++-- .../expression/function/scalar/math/Cosh.java | 15 ++++++--- .../expression/function/scalar/math/Sin.java | 15 +++++++-- .../expression/function/scalar/math/Sinh.java | 15 ++++++--- .../expression/function/scalar/math/Tan.java | 15 +++++++-- .../expression/function/scalar/math/Tanh.java | 15 ++++++--- .../function/AbstractFunctionTestCase.java | 1 + 120 files changed, 353 insertions(+), 229 deletions(-) delete mode 100644 docs/reference/esql/functions/cos.asciidoc create mode 100644 docs/reference/esql/functions/examples/cos.asciidoc create mode 100644 docs/reference/esql/functions/examples/cosh.asciidoc create mode 100644 docs/reference/esql/functions/examples/sin.asciidoc create mode 100644 docs/reference/esql/functions/examples/sinh.asciidoc create mode 100644 docs/reference/esql/functions/examples/tan.asciidoc create mode 100644 docs/reference/esql/functions/examples/tanh.asciidoc delete mode 100644 docs/reference/esql/functions/sin.asciidoc delete mode 100644 docs/reference/esql/functions/sinh.asciidoc delete mode 100644 docs/reference/esql/functions/tan.asciidoc delete mode 100644 docs/reference/esql/functions/tanh.asciidoc diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc deleted 
file mode 100644 index 7fa1d973c86b6..0000000000000 --- a/docs/reference/esql/functions/cos.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-cos]] -=== `COS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/cos.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Sine_and_cosine[cosine] of `n`. Input expected in -radians. - -include::types/cos.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=cos] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=cos-result] -|=== diff --git a/docs/reference/esql/functions/description/cos.asciidoc b/docs/reference/esql/functions/description/cos.asciidoc index e46d651b34c00..101489faabe1c 100644 --- a/docs/reference/esql/functions/description/cos.asciidoc +++ b/docs/reference/esql/functions/description/cos.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric cosine of an angle +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. diff --git a/docs/reference/esql/functions/description/cosh.asciidoc b/docs/reference/esql/functions/description/cosh.asciidoc index deaf780addb93..bfe51f9152875 100644 --- a/docs/reference/esql/functions/description/cosh.asciidoc +++ b/docs/reference/esql/functions/description/cosh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic cosine of a number +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. diff --git a/docs/reference/esql/functions/description/sin.asciidoc b/docs/reference/esql/functions/description/sin.asciidoc index 4a5f04732fccc..ba12ba88ca37a 100644 --- a/docs/reference/esql/functions/description/sin.asciidoc +++ b/docs/reference/esql/functions/description/sin.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric sine of an angle +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. diff --git a/docs/reference/esql/functions/description/sinh.asciidoc b/docs/reference/esql/functions/description/sinh.asciidoc index a51b88c7d446e..bb7761e2a0254 100644 --- a/docs/reference/esql/functions/description/sinh.asciidoc +++ b/docs/reference/esql/functions/description/sinh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic sine of a number +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. diff --git a/docs/reference/esql/functions/description/tan.asciidoc b/docs/reference/esql/functions/description/tan.asciidoc index 1f6a4f96f59f1..925bebf044a7b 100644 --- a/docs/reference/esql/functions/description/tan.asciidoc +++ b/docs/reference/esql/functions/description/tan.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric tangent of an angle +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. diff --git a/docs/reference/esql/functions/description/tanh.asciidoc b/docs/reference/esql/functions/description/tanh.asciidoc index 277783f7f70fe..7ee5e457dfe48 100644 --- a/docs/reference/esql/functions/description/tanh.asciidoc +++ b/docs/reference/esql/functions/description/tanh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic tangent of a number +Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle.
diff --git a/docs/reference/esql/functions/examples/cos.asciidoc b/docs/reference/esql/functions/examples/cos.asciidoc new file mode 100644 index 0000000000000..6d673fb413753 --- /dev/null +++ b/docs/reference/esql/functions/examples/cos.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=cos] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=cos-result] +|=== + diff --git a/docs/reference/esql/functions/examples/cosh.asciidoc b/docs/reference/esql/functions/examples/cosh.asciidoc new file mode 100644 index 0000000000000..bd9a8759f64e4 --- /dev/null +++ b/docs/reference/esql/functions/examples/cosh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=cosh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=cosh-result] +|=== + diff --git a/docs/reference/esql/functions/examples/sin.asciidoc b/docs/reference/esql/functions/examples/sin.asciidoc new file mode 100644 index 0000000000000..33ef89de10c70 --- /dev/null +++ b/docs/reference/esql/functions/examples/sin.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sin-result] +|=== + diff --git a/docs/reference/esql/functions/examples/sinh.asciidoc b/docs/reference/esql/functions/examples/sinh.asciidoc new file mode 100644 index 0000000000000..84eff26a0068e --- /dev/null +++ b/docs/reference/esql/functions/examples/sinh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sinh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sinh-result] +|=== + diff --git a/docs/reference/esql/functions/examples/tan.asciidoc b/docs/reference/esql/functions/examples/tan.asciidoc new file mode 100644 index 0000000000000..a8ad1ae8c2151 --- /dev/null +++ b/docs/reference/esql/functions/examples/tan.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=tan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=tan-result] +|=== + diff --git a/docs/reference/esql/functions/examples/tanh.asciidoc b/docs/reference/esql/functions/examples/tanh.asciidoc new file mode 100644 index 0000000000000..1f1a1d8d38397 --- /dev/null +++ b/docs/reference/esql/functions/examples/tanh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=tanh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=tanh-result] +|=== + diff --git a/docs/reference/esql/functions/layout/cos.asciidoc b/docs/reference/esql/functions/layout/cos.asciidoc index 7b97f40529096..1bb3e2544bc6f 100644 --- a/docs/reference/esql/functions/layout/cos.asciidoc +++ b/docs/reference/esql/functions/layout/cos.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/cos.svg[Embedded,opts=inline] include::../parameters/cos.asciidoc[] include::../description/cos.asciidoc[] include::../types/cos.asciidoc[] +include::../examples/cos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cosh.asciidoc b/docs/reference/esql/functions/layout/cosh.asciidoc index e36a96e0eb324..175b7b23324ce 100644 --- a/docs/reference/esql/functions/layout/cosh.asciidoc +++ b/docs/reference/esql/functions/layout/cosh.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/cosh.svg[Embedded,opts=inline] include::../parameters/cosh.asciidoc[] include::../description/cosh.asciidoc[] include::../types/cosh.asciidoc[] +include::../examples/cosh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sin.asciidoc b/docs/reference/esql/functions/layout/sin.asciidoc index 802045d0a23af..465bbd441f689 100644 --- a/docs/reference/esql/functions/layout/sin.asciidoc +++ b/docs/reference/esql/functions/layout/sin.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/sin.svg[Embedded,opts=inline] include::../parameters/sin.asciidoc[] include::../description/sin.asciidoc[] include::../types/sin.asciidoc[] +include::../examples/sin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sinh.asciidoc b/docs/reference/esql/functions/layout/sinh.asciidoc index 3ac1f03a608f6..11ae1a8b0403f 100644 --- a/docs/reference/esql/functions/layout/sinh.asciidoc +++ b/docs/reference/esql/functions/layout/sinh.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/sinh.svg[Embedded,opts=inline] include::../parameters/sinh.asciidoc[] include::../description/sinh.asciidoc[] include::../types/sinh.asciidoc[] +include::../examples/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tan.asciidoc b/docs/reference/esql/functions/layout/tan.asciidoc index 056145f5eed44..2723076238228 100644 --- a/docs/reference/esql/functions/layout/tan.asciidoc +++ b/docs/reference/esql/functions/layout/tan.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/tan.svg[Embedded,opts=inline] include::../parameters/tan.asciidoc[] include::../description/tan.asciidoc[] include::../types/tan.asciidoc[] +include::../examples/tan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tanh.asciidoc b/docs/reference/esql/functions/layout/tanh.asciidoc index 3024ac5fb2aff..338d8574d4949 100644 --- a/docs/reference/esql/functions/layout/tanh.asciidoc +++ b/docs/reference/esql/functions/layout/tanh.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/tanh.svg[Embedded,opts=inline] include::../parameters/tanh.asciidoc[] include::../description/tanh.asciidoc[] include::../types/tanh.asciidoc[] +include::../examples/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 5faf994d61db6..28830554198d2 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -37,8 +37,8 @@ 
include::layout/asin.asciidoc[] include::layout/atan.asciidoc[] include::layout/atan2.asciidoc[] include::layout/ceil.asciidoc[] -include::cos.asciidoc[] -include::cosh.asciidoc[] +include::layout/cos.asciidoc[] +include::layout/cosh.asciidoc[] include::e.asciidoc[] include::floor.asciidoc[] include::log.asciidoc[] @@ -46,9 +46,9 @@ include::log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] include::round.asciidoc[] -include::sin.asciidoc[] -include::sinh.asciidoc[] +include::layout/sin.asciidoc[] +include::layout/sinh.asciidoc[] include::sqrt.asciidoc[] -include::tan.asciidoc[] -include::tanh.asciidoc[] +include::layout/tan.asciidoc[] +include::layout/tanh.asciidoc[] include::tau.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/abs.asciidoc b/docs/reference/esql/functions/parameters/abs.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/abs.asciidoc +++ b/docs/reference/esql/functions/parameters/abs.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/acos.asciidoc b/docs/reference/esql/functions/parameters/acos.asciidoc index 2d06f7e70333d..d3fd81343a38b 100644 --- a/docs/reference/esql/functions/parameters/acos.asciidoc +++ b/docs/reference/esql/functions/parameters/acos.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc index 2d06f7e70333d..d3fd81343a38b 100644 --- a/docs/reference/esql/functions/parameters/asin.asciidoc +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/atan.asciidoc +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc index 8dc744ad03e6a..eceba6fea4217 100644 --- a/docs/reference/esql/functions/parameters/atan2.asciidoc +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `y_coordinate`:: diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc index 0f9c6a1b81c99..35228377b58de 100644 --- a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index fb70278c17d1a..c3617b7c0e32c 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `condition`:: diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc b/docs/reference/esql/functions/parameters/ceil.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/ceil.asciidoc +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 07c8a84ed5583..9b62a2e7e0d87 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/concat.asciidoc b/docs/reference/esql/functions/parameters/concat.asciidoc index 47a555fbe80c6..f0c9bfa62790c 100644 --- a/docs/reference/esql/functions/parameters/concat.asciidoc +++ b/docs/reference/esql/functions/parameters/concat.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string1`:: diff --git a/docs/reference/esql/functions/parameters/cos.asciidoc b/docs/reference/esql/functions/parameters/cos.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/cos.asciidoc +++ b/docs/reference/esql/functions/parameters/cos.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/cosh.asciidoc b/docs/reference/esql/functions/parameters/cosh.asciidoc index 1535b0feb8424..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/cosh.asciidoc +++ b/docs/reference/esql/functions/parameters/cosh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -The number who's hyperbolic cosine is to be returned +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/date_diff.asciidoc b/docs/reference/esql/functions/parameters/date_diff.asciidoc index 9a9ef6fb34fba..1dbb32235fcfe 100644 --- a/docs/reference/esql/functions/parameters/date_diff.asciidoc +++ b/docs/reference/esql/functions/parameters/date_diff.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `unit`:: diff --git a/docs/reference/esql/functions/parameters/date_extract.asciidoc b/docs/reference/esql/functions/parameters/date_extract.asciidoc index 170bc40d89ef6..8f42ed240abb5 100644 --- a/docs/reference/esql/functions/parameters/date_extract.asciidoc +++ b/docs/reference/esql/functions/parameters/date_extract.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `datePart`:: diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc index 7b000418b961c..773cbe1b66be5 100644 --- a/docs/reference/esql/functions/parameters/date_format.asciidoc +++ b/docs/reference/esql/functions/parameters/date_format.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `dateFormat`:: diff --git a/docs/reference/esql/functions/parameters/date_parse.asciidoc b/docs/reference/esql/functions/parameters/date_parse.asciidoc index 30a09e43c5361..cd6d432f67884 100644 --- a/docs/reference/esql/functions/parameters/date_parse.asciidoc +++ b/docs/reference/esql/functions/parameters/date_parse.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `datePattern`:: diff --git a/docs/reference/esql/functions/parameters/e.asciidoc b/docs/reference/esql/functions/parameters/e.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/e.asciidoc +++ b/docs/reference/esql/functions/parameters/e.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* diff --git a/docs/reference/esql/functions/parameters/ends_with.asciidoc b/docs/reference/esql/functions/parameters/ends_with.asciidoc index 314eec2bf39ea..af3640ae29b2c 100644 --- a/docs/reference/esql/functions/parameters/ends_with.asciidoc +++ b/docs/reference/esql/functions/parameters/ends_with.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/floor.asciidoc +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 55c75eae0de74..83ac29d0bf7c9 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 55c75eae0de74..83ac29d0bf7c9 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/left.asciidoc b/docs/reference/esql/functions/parameters/left.asciidoc index b296adfc064be..98f4c226456ef 100644 --- a/docs/reference/esql/functions/parameters/left.asciidoc +++ b/docs/reference/esql/functions/parameters/left.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/length.asciidoc b/docs/reference/esql/functions/parameters/length.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/length.asciidoc +++ b/docs/reference/esql/functions/parameters/length.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc index 1d2306c5b215b..3591efb47a9bd 100644 --- a/docs/reference/esql/functions/parameters/log.asciidoc +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `base`:: diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/log10.asciidoc +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/ltrim.asciidoc b/docs/reference/esql/functions/parameters/ltrim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/ltrim.asciidoc +++ b/docs/reference/esql/functions/parameters/ltrim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/mv_avg.asciidoc b/docs/reference/esql/functions/parameters/mv_avg.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_avg.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_avg.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_concat.asciidoc b/docs/reference/esql/functions/parameters/mv_concat.asciidoc index 88893478e2b74..8b2c62581d775 100644 --- a/docs/reference/esql/functions/parameters/mv_concat.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_concat.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/mv_count.asciidoc b/docs/reference/esql/functions/parameters/mv_count.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_count.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_count.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_first.asciidoc b/docs/reference/esql/functions/parameters/mv_first.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_first.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_first.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_last.asciidoc b/docs/reference/esql/functions/parameters/mv_last.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_last.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_last.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_max.asciidoc b/docs/reference/esql/functions/parameters/mv_max.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_max.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_max.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_median.asciidoc b/docs/reference/esql/functions/parameters/mv_median.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_median.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_median.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_min.asciidoc b/docs/reference/esql/functions/parameters/mv_min.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_min.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_min.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_slice.asciidoc b/docs/reference/esql/functions/parameters/mv_slice.asciidoc index cffbfaff95e86..aa40404140e93 100644 --- a/docs/reference/esql/functions/parameters/mv_slice.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_slice.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_sort.asciidoc b/docs/reference/esql/functions/parameters/mv_sort.asciidoc index aee8353cfd416..1ccbf2f0ee0c5 100644 --- a/docs/reference/esql/functions/parameters/mv_sort.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_sort.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_sum.asciidoc b/docs/reference/esql/functions/parameters/mv_sum.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_sum.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_sum.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_zip.asciidoc b/docs/reference/esql/functions/parameters/mv_zip.asciidoc index 09ab5969fe66a..25940864bfdcd 100644 --- a/docs/reference/esql/functions/parameters/mv_zip.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_zip.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string1`:: diff --git a/docs/reference/esql/functions/parameters/pi.asciidoc b/docs/reference/esql/functions/parameters/pi.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/pi.asciidoc +++ b/docs/reference/esql/functions/parameters/pi.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* diff --git a/docs/reference/esql/functions/parameters/pow.asciidoc b/docs/reference/esql/functions/parameters/pow.asciidoc index 77b3dc186dac7..8e94723086e2e 100644 --- a/docs/reference/esql/functions/parameters/pow.asciidoc +++ b/docs/reference/esql/functions/parameters/pow.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `base`:: diff --git a/docs/reference/esql/functions/parameters/replace.asciidoc b/docs/reference/esql/functions/parameters/replace.asciidoc index f8831e5a6b8c5..331c7425e7189 100644 --- a/docs/reference/esql/functions/parameters/replace.asciidoc +++ b/docs/reference/esql/functions/parameters/replace.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/right.asciidoc b/docs/reference/esql/functions/parameters/right.asciidoc index 3ddd7e7c8cd68..1a05aedf542a9 100644 --- a/docs/reference/esql/functions/parameters/right.asciidoc +++ b/docs/reference/esql/functions/parameters/right.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc index ef53d9e07eb00..788c99434fd29 100644 --- a/docs/reference/esql/functions/parameters/round.asciidoc +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/rtrim.asciidoc b/docs/reference/esql/functions/parameters/rtrim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/rtrim.asciidoc +++ b/docs/reference/esql/functions/parameters/rtrim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/sin.asciidoc b/docs/reference/esql/functions/parameters/sin.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/sin.asciidoc +++ b/docs/reference/esql/functions/parameters/sin.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/sinh.asciidoc b/docs/reference/esql/functions/parameters/sinh.asciidoc index d2dfa9701ff89..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/sinh.asciidoc +++ b/docs/reference/esql/functions/parameters/sinh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -The number to return the hyperbolic sine of +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/split.asciidoc b/docs/reference/esql/functions/parameters/split.asciidoc index 7b3c24adae928..2308f59362d98 100644 --- a/docs/reference/esql/functions/parameters/split.asciidoc +++ b/docs/reference/esql/functions/parameters/split.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/sqrt.asciidoc b/docs/reference/esql/functions/parameters/sqrt.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/sqrt.asciidoc +++ b/docs/reference/esql/functions/parameters/sqrt.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index dbc9adf478948..e87a0d0eb94f0 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `geomA`:: diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index d3d26fc981caf..4e8e77dea1f86 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `point`:: diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index d3d26fc981caf..4e8e77dea1f86 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `point`:: diff --git a/docs/reference/esql/functions/parameters/starts_with.asciidoc b/docs/reference/esql/functions/parameters/starts_with.asciidoc index 75558cad04106..93a43b3406856 100644 --- a/docs/reference/esql/functions/parameters/starts_with.asciidoc +++ b/docs/reference/esql/functions/parameters/starts_with.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/substring.asciidoc b/docs/reference/esql/functions/parameters/substring.asciidoc index 19c4e5551185a..df6aa84d85e87 100644 --- a/docs/reference/esql/functions/parameters/substring.asciidoc +++ b/docs/reference/esql/functions/parameters/substring.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/tan.asciidoc b/docs/reference/esql/functions/parameters/tan.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/tan.asciidoc +++ b/docs/reference/esql/functions/parameters/tan.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/tanh.asciidoc b/docs/reference/esql/functions/parameters/tanh.asciidoc index 1fc97c3b68f84..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/tanh.asciidoc +++ b/docs/reference/esql/functions/parameters/tanh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -The number to return the hyperbolic tangent of +`angle`:: +An angle, in radians. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/parameters/tau.asciidoc b/docs/reference/esql/functions/parameters/tau.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/tau.asciidoc +++ b/docs/reference/esql/functions/parameters/tau.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* diff --git a/docs/reference/esql/functions/parameters/to_boolean.asciidoc b/docs/reference/esql/functions/parameters/to_boolean.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_boolean.asciidoc +++ b/docs/reference/esql/functions/parameters/to_boolean.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_datetime.asciidoc b/docs/reference/esql/functions/parameters/to_datetime.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_datetime.asciidoc +++ b/docs/reference/esql/functions/parameters/to_datetime.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_degrees.asciidoc b/docs/reference/esql/functions/parameters/to_degrees.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/to_degrees.asciidoc +++ b/docs/reference/esql/functions/parameters/to_degrees.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/to_double.asciidoc b/docs/reference/esql/functions/parameters/to_double.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_double.asciidoc +++ b/docs/reference/esql/functions/parameters/to_double.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_integer.asciidoc b/docs/reference/esql/functions/parameters/to_integer.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_integer.asciidoc +++ b/docs/reference/esql/functions/parameters/to_integer.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_ip.asciidoc b/docs/reference/esql/functions/parameters/to_ip.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_ip.asciidoc +++ b/docs/reference/esql/functions/parameters/to_ip.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_long.asciidoc b/docs/reference/esql/functions/parameters/to_long.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_long.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_lower.asciidoc b/docs/reference/esql/functions/parameters/to_lower.asciidoc index 4f2e56949be24..e9e9436113786 100644 --- a/docs/reference/esql/functions/parameters/to_lower.asciidoc +++ b/docs/reference/esql/functions/parameters/to_lower.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/to_radians.asciidoc b/docs/reference/esql/functions/parameters/to_radians.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/to_radians.asciidoc +++ b/docs/reference/esql/functions/parameters/to_radians.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/to_string.asciidoc b/docs/reference/esql/functions/parameters/to_string.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_string.asciidoc +++ b/docs/reference/esql/functions/parameters/to_string.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_upper.asciidoc b/docs/reference/esql/functions/parameters/to_upper.asciidoc index 4f2e56949be24..e9e9436113786 100644 --- a/docs/reference/esql/functions/parameters/to_upper.asciidoc +++ b/docs/reference/esql/functions/parameters/to_upper.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/to_version.asciidoc b/docs/reference/esql/functions/parameters/to_version.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_version.asciidoc +++ b/docs/reference/esql/functions/parameters/to_version.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/trim.asciidoc b/docs/reference/esql/functions/parameters/trim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/trim.asciidoc +++ b/docs/reference/esql/functions/parameters/trim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `string`:: diff --git a/docs/reference/esql/functions/signature/cos.svg b/docs/reference/esql/functions/signature/cos.svg index ff0484a362aef..4f8ddafaec65b 100644 --- a/docs/reference/esql/functions/signature/cos.svg +++ b/docs/reference/esql/functions/signature/cos.svg @@ -1 +1 @@ -COS(number) \ No newline at end of file +COS(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cosh.svg b/docs/reference/esql/functions/signature/cosh.svg index 9b9eddd3cb808..11b14d922929a 100644 --- a/docs/reference/esql/functions/signature/cosh.svg +++ b/docs/reference/esql/functions/signature/cosh.svg @@ -1 +1 @@ -COSH(number) \ No newline at end of file +COSH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sin.svg b/docs/reference/esql/functions/signature/sin.svg index 2c60f0580f8fb..c111a7611ac9e 100644 --- a/docs/reference/esql/functions/signature/sin.svg +++ b/docs/reference/esql/functions/signature/sin.svg @@ -1 +1 @@ -SIN(number) \ No newline at end of file +SIN(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sinh.svg b/docs/reference/esql/functions/signature/sinh.svg index 16e7ddb6b6534..0bb4ac31dee30 100644 --- a/docs/reference/esql/functions/signature/sinh.svg +++ b/docs/reference/esql/functions/signature/sinh.svg @@ -1 +1 @@ -SINH(number) \ No newline at end of file +SINH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tan.svg b/docs/reference/esql/functions/signature/tan.svg index c8065b30586cc..f85929a58164b 100644 --- a/docs/reference/esql/functions/signature/tan.svg +++ b/docs/reference/esql/functions/signature/tan.svg @@ -1 +1 @@ -TAN(number) \ No newline at end of file +TAN(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tanh.svg b/docs/reference/esql/functions/signature/tanh.svg index c2edfe2d6942f..f7b968f8b30c4 100644 --- a/docs/reference/esql/functions/signature/tanh.svg +++ b/docs/reference/esql/functions/signature/tanh.svg @@ -1 +1 @@ -TANH(number) \ No newline at end of file +TANH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/sin.asciidoc b/docs/reference/esql/functions/sin.asciidoc deleted file mode 100644 index 6034a695c6071..0000000000000 --- a/docs/reference/esql/functions/sin.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-sin]] -=== `SIN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/sin.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Sine_and_cosine[Sine] trigonometric function. Input expected in -radians. - -include::types/sin.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=sin] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=sin-result] -|=== diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc deleted file mode 100644 index 0931b9a2b88e1..0000000000000 --- a/docs/reference/esql/functions/sinh.asciidoc +++ /dev/null @@ -1,30 +0,0 @@ -[discrete] -[[esql-sinh]] -=== `SINH` - -*Syntax* - -[.text-center] -image::esql/functions/signature/sinh.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Hyperbolic_functions[Sine] hyperbolic function. 
- -include::types/sinh.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=sinh] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=sinh-result] -|=== diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc deleted file mode 100644 index 3b1c446806733..0000000000000 --- a/docs/reference/esql/functions/tan.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-tan]] -=== `TAN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/tan.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Sine_and_cosine[Tangent] trigonometric function. Input expected in -radians. - -include::types/tan.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=tan] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=tan-result] -|=== diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc deleted file mode 100644 index 9b47c68c19cf1..0000000000000 --- a/docs/reference/esql/functions/tanh.asciidoc +++ /dev/null @@ -1,30 +0,0 @@ -[discrete] -[[esql-tanh]] -=== `TANH` - -*Syntax* - -[.text-center] -image::esql/functions/signature/tanh.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function. - -include::types/tanh.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=tanh] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=tanh-result] -|=== diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | 
double diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 3fdfa7835b036..668ecec0e393d 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -79,7 +79,7 @@ tasks.named("test").configure { into "${rootDir}/docs/reference/esql/functions" include '**/*.asciidoc', '**/*.svg' preserve { - include '/*.asciidoc', '**/*.asciidoc', '**/*.svg' + include '/*.asciidoc', '**/*.asciidoc', '**/*.svg', 'README.md' } } } else { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 7d1617b208f34..9f9aeec7e2838 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -15,8 +15,8 @@ ceil |"double|integer|long|unsigned_long ceil(number:double| cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false -cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false -cosh |"double cosh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +cos |"double cos(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle." | false | false | false +cosh |"double cosh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." 
|double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle." | false | false | false count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false @@ -59,8 +59,8 @@ replace |"keyword replace(string:keyword|text, regex:keyword|te right |"keyword right(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false round |"double round(number:double, ?decimals:integer)" |[number, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false rtrim |"keyword|text rtrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false -sinh |"double sinh(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle." | false | false | false +sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle." | false | false | false split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." 
| false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true @@ -70,8 +70,8 @@ st_y |"double st_y(point:geo_point|cartesian_point)" starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false substring |"keyword substring(string:keyword|text, start:integer, ?length:integer)" |[string, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false sum |"long sum(number:double|integer|long)" |number |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false -tanh |"double tanh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tan |"double tan(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle." | false | false | false +tanh |"double tanh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle." | false | false | false tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false to_bool |"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false to_boolean |"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." 
|false |false | false
@@ -119,8 +119,8 @@ synopsis:keyword
"boolean cidr_match(ip:ip, blockX...:keyword|text)"
"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)"
"keyword concat(string1:keyword|text, string2...:keyword|text)"
-"double cos(number:double|integer|long|unsigned_long)"
-"double cosh(number:double|integer|long|unsigned_long)"
+"double cos(angle:double|integer|long|unsigned_long)"
+"double cosh(angle:double|integer|long|unsigned_long)"
"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)"
"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)"
"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"
@@ -163,8 +163,8 @@ double pi()
"keyword right(string:keyword|text, length:integer)"
"double round(number:double, ?decimals:integer)"
"keyword|text rtrim(string:keyword|text)"
-"double sin(number:double|integer|long|unsigned_long)"
-"double sinh(number:double|integer|long|unsigned_long)"
+"double sin(angle:double|integer|long|unsigned_long)"
+"double sinh(angle:double|integer|long|unsigned_long)"
"keyword split(string:keyword|text, delim:keyword|text)"
"double sqrt(number:double|integer|long|unsigned_long)"
"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)"
@@ -174,8 +174,8 @@ double pi()
"boolean starts_with(str:keyword|text, prefix:keyword|text)"
"keyword substring(string:keyword|text, start:integer, ?length:integer)"
"long sum(number:double|integer|long)"
-"double tan(number:double|integer|long|unsigned_long)"
-"double tanh(number:double|integer|long|unsigned_long)"
+"double tan(angle:double|integer|long|unsigned_long)"
+"double tanh(angle:double|integer|long|unsigned_long)"
double tau()
"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)"
"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)"
@@ -212,9 +212,9 @@ META FUNCTIONS
| WHERE STARTS_WITH(name, "sin")
;
- name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean
-sin | "double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false
-sinh | "double sinh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false
+name:keyword | synopsis:keyword |argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean
+sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle." | false | false | false
+sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle."
| false | false | false ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index e928f1ae2713e..d327956720840 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,12 +22,20 @@ * Cosine trigonometric function. */ public class Cos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle.", + examples = @Example(file = "floats", tag = "cos") + ) public Cos( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param( + name = "angle", + type = { "double", "integer", "long", "unsigned_long" }, + description = "An angle, in radians. If `null`, the function returns `null`." + ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 55250a3ac720f..93170ec4d7540 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,16 +22,20 @@ * Cosine hyperbolic function. */ public class Cosh extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the hyperbolic cosine of a number") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle.", + examples = @Example(file = "floats", tag = "cosh") + ) public Cosh( Source source, @Param( - name = "number", + name = "angle", type = { "double", "integer", "long", "unsigned_long" }, - description = "The number who's hyperbolic cosine is to be returned" - ) Expression n + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle
) {
- super(source, n);
+ super(source, angle);
}

@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
index d8b36a3d38856..11cc7bccc2288 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
@@ -9,6 +9,7 @@
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
+import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.ql.expression.Expression;
@@ -22,12 +23,20 @@
*/
public class Sin extends AbstractTrigonometricFunction {
- @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle")
+ @FunctionInfo(
+ returnType = "double",
+ description = "Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.",
+ examples = @Example(file = "floats", tag = "sin")
+ )
public Sin(
Source source,
- @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n
+ @Param(
+ name = "angle",
+ type = { "double", "integer", "long", "unsigned_long" },
+ description = "An angle, in radians. If `null`, the function returns `null`."
+ ) Expression angle
) {
- super(source, n);
+ super(source, angle);
}

@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
index 0c46002f56af6..142f15c8bfbe0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
@@ -9,6 +9,7 @@
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
+import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.ql.expression.Expression;
@@ -21,16 +22,20 @@
* Sine hyperbolic function.
*/
public class Sinh extends AbstractTrigonometricFunction {
- @FunctionInfo(returnType = "double", description = "Returns the hyperbolic sine of a number")
+ @FunctionInfo(
+ returnType = "double",
+ description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle.",
+ examples = @Example(file = "floats", tag = "sinh")
+ )
public Sinh(
Source source,
@Param(
- name = "number",
+ name = "angle",
type = { "double", "integer", "long", "unsigned_long" },
- description = "The number to return the hyperbolic sine of"
- ) Expression n
+ description = "An angle, in radians. If `null`, the function returns `null`."
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 002de2ddfc277..3752f986894ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,12 +22,20 @@ * Tangent trigonometric function. */ public class Tan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle.", + examples = @Example(file = "floats", tag = "tan") + ) public Tan( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param( + name = "angle", + type = { "double", "integer", "long", "unsigned_long" }, + description = "An angle, in radians. If `null`, the function returns `null`." + ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index 932677ef0b26d..726a269ebedc5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,16 +22,20 @@ * Tangent hyperbolic function. */ public class Tanh extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the hyperbolic tangent of a number") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle.", + examples = @Example(file = "floats", tag = "tanh") + ) public Tanh( Source source, @Param( - name = "number", + name = "angle", type = { "double", "integer", "long", "unsigned_long" }, - description = "The number to return the hyperbolic tangent of" - ) Expression n + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 78c1c57e07782..56869de1b87ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1149,6 +1149,7 @@ private static void renderTypes(List argNames) throws IOException { private static void renderParametersList(List argNames, List argDescriptions) throws IOException { StringBuilder builder = new StringBuilder(); + builder.append(DOCS_WARNING); builder.append("*Parameters*\n"); for (int a = 0; a < argNames.size(); a++) { builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); From adc42ebda2b8a908f2b4d092405ce4d7782b2f4b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 29 Mar 2024 11:20:49 -0700 Subject: [PATCH 008/264] Remove row method from Vector (#106922) We previously introduced the row method for TopN. However, TopN no longer uses this method. We should remove it to prevent potential misuse. --- .../java/org/elasticsearch/compute/data/AbstractVector.java | 5 ----- .../src/main/java/org/elasticsearch/compute/data/Vector.java | 3 --- 2 files changed, 8 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index 1e1f8bbf2f8df..240a16c6a28c3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -24,11 +24,6 @@ public final int getPositionCount() { return positionCount; } - @Override - public final Vector getRow(int position) { - return filter(position); - } - @Override public BlockFactory blockFactory() { return blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 757e2a5b22145..9461a3e066df3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -28,9 +28,6 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ int getPositionCount(); - // TODO: improve implementation not to waste as much space - Vector getRow(int position); - /** * Creates a new vector that only exposes the positions provided. Materialization of the selected positions is avoided. * @param positions the positions to retain From 2c1e45a0d6e4418b1e643ca84abaa6dd668c8626 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 29 Mar 2024 12:00:49 -0700 Subject: [PATCH 009/264] Fix downsample action request serialization (#106919) This issue exists in 8.10. 
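The root cause is an asymmetry: the write side unconditionally wrote the timeout string, while the read side only consumed it on new enough transport versions, so a pre-8.10 peer was left with unexpected bytes on the wire. The fix restores the usual wire-BWC invariant, shown here as a minimal sketch (mirroring the change to DownsampleAction.Request in the diff below):

    // writeTo: only emit the optional field when the receiver understands it
    if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
        out.writeString(waitTimeout.getStringRep());
    }

    // readFrom: mirror the exact same version check, falling back to a default otherwise
    if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
        waitTimeout = TimeValue.parseTimeValue(in.readString(), "timeout");
    } else {
        waitTimeout = DEFAULT_WAIT_TIMEOUT;
    }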
Closes #106917 --- docs/changelog/106919.yaml | 6 ++++ .../action/downsample/DownsampleAction.java | 16 +++++----- .../test/downsample/10_basic.yml | 31 +++++-------------- 3 files changed, 22 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/106919.yaml diff --git a/docs/changelog/106919.yaml b/docs/changelog/106919.yaml new file mode 100644 index 0000000000000..d8288095590de --- /dev/null +++ b/docs/changelog/106919.yaml @@ -0,0 +1,6 @@ +pr: 106919 +summary: Fix downsample action request serialization +area: Downsampling +type: bug +issues: + - 106917 diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index 08d315fe39ce5..e8e299c58d2eb 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -62,9 +62,11 @@ public Request(StreamInput in) throws IOException { super(in); sourceIndex = in.readString(); targetIndex = in.readString(); - waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) - ? TimeValue.parseTimeValue(in.readString(), "timeout") - : DEFAULT_WAIT_TIMEOUT; + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { + waitTimeout = TimeValue.parseTimeValue(in.readString(), "timeout"); + } else { + waitTimeout = DEFAULT_WAIT_TIMEOUT; + } downsampleConfig = new DownsampleConfig(in); } @@ -88,11 +90,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sourceIndex); out.writeString(targetIndex); - out.writeString( - out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) - ? waitTimeout.getStringRep() - : DEFAULT_WAIT_TIMEOUT.getStringRep() - ); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { + out.writeString(waitTimeout.getStringRep()); + } downsampleConfig.writeTo(out); } diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 265f97e73c234..95c69efa5b36d 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -46,17 +46,9 @@ setup: multi-counter: type: long time_series_metric: counter - scaled-counter: - type: scaled_float - scaling_factor: 100 - time_series_metric: counter multi-gauge: type: integer time_series_metric: gauge - scaled-gauge: - type: scaled_float - scaling_factor: 100 - time_series_metric: gauge network: properties: tx: @@ -71,21 +63,21 @@ setup: index: test body: - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "scaled-counter": 10.0, "multi-gauge": [100, 200, 150], "scaled-gauge": 100.0, "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 
11, 12], "multi-gauge": [100, 200, 150], "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "scaled-counter": 20.0, "multi-gauge": [90, 91, 95], "scaled-gauge": 90.0, "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "multi-gauge": [90, 91, 95], "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "scaled-counter": 1.0, "multi-gauge": [103, 110, 109], "scaled-gauge": 104.0, "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "multi-gauge": [103, 110, 109], "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "scaled-counter": 100.0, "multi-gauge": [100, 100, 100], "scaled-gauge": 102.0, "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "multi-gauge": [100, 100, 100], "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "scaled-counter": 7.0, "multi-gauge": [100, 100, 102], "scaled-gauge": 100.0, "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", 
"uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "multi-gauge": [100, 100, 102], "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "scaled-counter": 0.0, "multi-gauge": [101, 102, 102], "scaled-gauge": 101.0, "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "multi-gauge": [101, 102, 102], "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "scaled-counter": 1000.0, "multi-gauge": [99, 100, 110], "scaled-gauge": 99.0, "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "multi-gauge": [99, 100, 110], "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "scaled-counter": 70.0, "multi-gauge": [95, 98, 100], "scaled-gauge": 95.0, "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "multi-gauge": [95, 98, 100], "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' - do: indices.put_settings: @@ -141,12 +133,5 @@ setup: - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } - - match: { 
test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } From cf9a333d3a4a470af4a219df17762fd611757a9f Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 29 Mar 2024 12:44:31 -0700 Subject: [PATCH 010/264] Remove unused code from example plugin (#106928) --- .../customsuggester/CustomSuggestion.java | 50 +------------------ 1 file changed, 2 insertions(+), 48 deletions(-) diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java index 402c4c6ef7920..afffd8266c11e 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java +++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java @@ -8,20 +8,15 @@ package org.elasticsearch.example.customsuggester; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.text.Text; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public class CustomSuggestion extends Suggest.Suggestion { public static final ParseField DUMMY = new ParseField("dummy"); @@ -64,26 +59,8 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } - public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException { - CustomSuggestion suggestion = new CustomSuggestion(name, -1, null); - parseEntries(parser, suggestion, Entry::fromXContent); - return suggestion; - } - public static class Entry extends Suggest.Suggestion.Entry { - private static final ObjectParser PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new); - - static { - declareCommonFields(PARSER); - PARSER.declareString((entry, dummy) -> entry.dummy = dummy, DUMMY); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
- * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214
- */
- PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS));
- }
-
private String dummy;
public Entry() {}
@@ -131,27 +108,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
- public static Entry fromXContent(XContentParser parser) {
- return PARSER.apply(parser, null);
- }
-
public static class Option extends Suggest.Suggestion.Entry.Option {
- private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
- "CustomSuggestionObjectParser", true,
- args -> {
- Text text = new Text((String) args[0]);
- float score = (float) args[1];
- String dummy = (String) args[2];
- return new Option(text, score, dummy);
- });
-
- static {
- PARSER.declareString(constructorArg(), TEXT);
- PARSER.declareFloat(constructorArg(), SCORE);
- PARSER.declareString(constructorArg(), DUMMY);
- }
-
private String dummy;
public Option(Text text, float score, String dummy) {
@@ -192,10 +150,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(DUMMY.getPreferredName(), dummy);
return builder;
}
-
- public static Option fromXContent(XContentParser parser) {
- return PARSER.apply(parser, null);
- }
}
}
}

From 6b419c13d318a14d33fdfdc9f209654db4035bcc Mon Sep 17 00:00:00 2001
From: Dianna Hohensee
Date: Fri, 29 Mar 2024 15:53:16 -0400
Subject: [PATCH 011/264] TransportNodesAction supports async node response
 (#106733)

Gives subclasses of TransportNodesAction the option to access the request
listener so that they can respond asynchronously to incoming node requests.

Relates ES-6685
---
 .../support/nodes/TransportNodesAction.java | 28 +++++++++++++------
 .../cluster/routing/ShardRouting.java | 2 ++
 2 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
index 48036239793c0..daf3334dcaf65 100644
--- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
@@ -15,6 +15,7 @@
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.CancellableFanOut;
+import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.cluster.ClusterState;
@@ -216,11 +217,24 @@ protected void newResponseAsync(
protected abstract NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException;

+ /**
+ * Implements the request recipient logic.
+ * If access to the request listener is needed, override {@link #nodeOperationAsync(TransportRequest, Task, ActionListener)}.
+ */
protected abstract NodeResponse nodeOperation(NodeRequest request, Task task);

/**
- * resolve node ids to concrete nodes of the incoming request
- **/
+ * This method can be overridden if a subclass needs access to the listener in order to respond asynchronously to the node request.
+ * The default implementation is to fall through to {@link #nodeOperation}.
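+ * A sketch of such an override (illustrative only, not part of this change; it assumes the subclass holds a ThreadPool to fork onto):
+ * {@code
+ * protected void nodeOperationAsync(NodeRequest request, Task task, ActionListener<NodeResponse> listener) {
+ *     threadPool.generic().execute(ActionRunnable.supply(listener, () -> nodeOperation(request, task)));
+ * }
+ * }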
+ */
+ protected void nodeOperationAsync(NodeRequest request, Task task, ActionListener<NodeResponse> listener) {
+ ActionListener.respondAndRelease(listener, nodeOperation(request, task));
+ }
+
+ /**
+ * Resolves node ids to concrete nodes of the incoming request.
+ * NB: if the request's nodeIds() returns nothing, then the request will be sent to ALL known nodes in the cluster.
+ */
protected void resolveRequest(NodesRequest request, ClusterState clusterState) {
assert request.concreteNodes() == null : "request concreteNodes shouldn't be set";
String[] nodesIds = clusterState.nodes().resolveNodes(request.nodesIds());
@@ -230,12 +244,10 @@ protected void resolveRequest(NodesRequest request, ClusterState clusterState) {
class NodeTransportHandler implements TransportRequestHandler<NodeRequest> {
@Override
public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception {
- final var nodeResponse = nodeOperation(request, task);
- try {
- channel.sendResponse(nodeResponse);
- } finally {
- nodeResponse.decRef();
- }
+ ActionListener.run(
+ new ChannelActionListener<NodeResponse>(channel),
+ channelListener -> nodeOperationAsync(request, task, channelListener)
+ );
}
}

diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
index cd4a929052a62..95882e26773e5 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
@@ -33,6 +33,8 @@
/**
 * {@link ShardRouting} immutably encapsulates information about shard
 * indexRoutings like id, state, version, etc.
+ *
+ * Information about a particular shard instance.
 */
public final class ShardRouting implements Writeable, ToXContentObject {

From f0fa7158868a86c65df540b15e2c081394ff77e2 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Fri, 29 Mar 2024 22:21:54 +0100
Subject: [PATCH 012/264] Do not retain whitelist statically in painless
 plugin (#106913)

We only need this single-item list, as well as the whitelist map, once,
when setting up the script engine. Not holding on to them statically
saves about 1.4MB of heap.

=> don't hold on to the list statically
=> null out the map that is needed across method calls (and fix a
potential concurrency issue with the existing solution ...).
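As a self-contained sketch of the consume-once hand-off this change applies (simplified types and names, not the actual PainlessPlugin code):

    import java.util.List;
    import java.util.Map;

    class ConsumeOnceSketch {
        // Written once while extensions load, then read exactly once when the engine is built.
        // The volatile field makes the hand-off between the loading thread and the
        // consuming thread safe, which is the concurrency issue mentioned above.
        private volatile Map<String, List<String>> whitelists;

        void loadExtensions(Map<String, List<String>> loaded) {
            this.whitelists = loaded;
        }

        String createScriptEngine() {
            final var wl = whitelists;
            whitelists = null; // release the map so the heap it retains can be reclaimed
            assert wl != null : "whitelists must be consumed exactly once";
            return "engine built from " + wl.size() + " context(s)"; // stand-in for the real engine
        }

        public static void main(String[] args) {
            ConsumeOnceSketch sketch = new ConsumeOnceSketch();
            sketch.loadExtensions(Map.of("test", List.of("org.elasticsearch.txt")));
            System.out.println(sketch.createScriptEngine()); // prints: engine built from 1 context(s)
        }
    }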
--- .../painless/PainlessPlugin.java | 124 ++++++++++-------- .../elasticsearch/painless/AliasTests.java | 2 +- .../painless/AugmentationTests.java | 2 +- .../painless/BaseClassTests.java | 54 ++++---- .../painless/BasicStatementTests.java | 2 +- .../elasticsearch/painless/BindingsTests.java | 2 +- .../elasticsearch/painless/DebugTests.java | 2 +- .../org/elasticsearch/painless/Debugger.java | 6 +- .../painless/DefBootstrapTests.java | 4 +- .../painless/DynamicTypeTests.java | 2 +- .../org/elasticsearch/painless/EmitTests.java | 2 +- .../elasticsearch/painless/FactoryTests.java | 16 +-- .../painless/NeedsScoreTests.java | 3 +- .../painless/ScriptTestCase.java | 5 +- .../ScriptedMetricAggContextsTests.java | 8 +- .../painless/SimilarityScriptTests.java | 4 +- .../org/elasticsearch/painless/ThisTests.java | 2 +- 17 files changed, 129 insertions(+), 111 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 068821793e44c..c37fe8866fec8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.painless.action.PainlessContextAction; import org.elasticsearch.painless.action.PainlessExecuteAction; @@ -52,71 +53,50 @@ */ public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin { - private static final Map, List> whitelists; - private static final String[] BASE_WHITELIST_FILES = new String[] { - "org.elasticsearch.txt", - "org.elasticsearch.net.txt", - "org.elasticsearch.script.fields.txt", - "java.lang.txt", - "java.math.txt", - "java.text.txt", - "java.time.txt", - "java.time.chrono.txt", - "java.time.format.txt", - "java.time.temporal.txt", - "java.time.zone.txt", - "java.util.txt", - "java.util.function.txt", - "java.util.regex.txt", - "java.util.stream.txt", - "java.nio.txt" }; - public static final List BASE_WHITELISTS = Collections.singletonList( - WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES) - ); - - /* - * Contexts from Core that need custom whitelists can add them to the map below. - * Whitelist resources should be added as appropriately named, separate files - * under Painless' resources - */ - static { - whitelists = new HashMap<>(); + private volatile Map, List> whitelists; - for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { - List contextWhitelists = new ArrayList<>(); - if (PainlessPlugin.class.getResource("org.elasticsearch.script." + context.name.replace('-', '_') + ".txt") != null) { - contextWhitelists.add( - WhitelistLoader.loadFromResourceFiles( - PainlessPlugin.class, - "org.elasticsearch.script." 
+ context.name.replace('-', '_') + ".txt" - ) - ); - } - - whitelists.put(context, contextWhitelists); - } + private final SetOnce painlessScriptEngine = new SetOnce<>(); - List testWhitelists = new ArrayList<>(); - for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { - if (ScriptModule.RUNTIME_FIELDS_CONTEXTS.contains(context) == false) { - testWhitelists.addAll(whitelists.get(context)); - } - } - testWhitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.json.txt")); - whitelists.put(PainlessTestScript.CONTEXT, testWhitelists); + public static List baseWhiteList() { + return List.of( + WhitelistLoader.loadFromResourceFiles( + PainlessPlugin.class, + WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, + "org.elasticsearch.txt", + "org.elasticsearch.net.txt", + "org.elasticsearch.script.fields.txt", + "java.lang.txt", + "java.math.txt", + "java.text.txt", + "java.time.txt", + "java.time.chrono.txt", + "java.time.format.txt", + "java.time.temporal.txt", + "java.time.zone.txt", + "java.util.txt", + "java.util.function.txt", + "java.util.regex.txt", + "java.util.stream.txt", + "java.nio.txt" + ) + ); } - private final SetOnce painlessScriptEngine = new SetOnce<>(); - @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + final var wl = whitelists; + whitelists = null; + assert wl != null; Map, List> contextsWithWhitelists = new HashMap<>(); + final List baseWhiteList = baseWhiteList(); for (ScriptContext context : contexts) { // we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI - List mergedWhitelists = new ArrayList<>(BASE_WHITELISTS); - List contextWhitelists = whitelists.get(context); - if (contextWhitelists != null) { - mergedWhitelists.addAll(contextWhitelists); + List contextWhitelists = wl.get(context); + final List mergedWhitelists; + if (contextWhitelists != null && contextWhitelists.isEmpty() == false) { + mergedWhitelists = CollectionUtils.concatLists(baseWhiteList, contextWhitelists); + } else { + mergedWhitelists = baseWhiteList; } contextsWithWhitelists.put(context, mergedWhitelists); } @@ -138,13 +118,43 @@ public List> getSettings() { @Override public void loadExtensions(ExtensionLoader loader) { + final Map, List> whitelistsBuilder = new HashMap<>(); + /* + * Contexts from Core that need custom whitelists can add them to the map below. + * Whitelist resources should be added as appropriately named, separate files + * under Painless' resources + */ + for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { + List contextWhitelists = new ArrayList<>(); + if (PainlessPlugin.class.getResource("org.elasticsearch.script." + context.name.replace('-', '_') + ".txt") != null) { + contextWhitelists.add( + WhitelistLoader.loadFromResourceFiles( + PainlessPlugin.class, + "org.elasticsearch.script." 
+ context.name.replace('-', '_') + ".txt" + ) + ); + } + + whitelistsBuilder.put(context, contextWhitelists); + } + + List testWhitelists = new ArrayList<>(); + for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { + if (ScriptModule.RUNTIME_FIELDS_CONTEXTS.contains(context) == false) { + testWhitelists.addAll(whitelistsBuilder.get(context)); + } + } + testWhitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.json.txt")); + whitelistsBuilder.put(PainlessTestScript.CONTEXT, testWhitelists); loader.loadExtensions(PainlessExtension.class) .stream() .flatMap(extension -> extension.getContextWhitelists().entrySet().stream()) .forEach(entry -> { - List existing = whitelists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>()); + List existing = whitelistsBuilder.computeIfAbsent(entry.getKey(), c -> new ArrayList<>()); existing.addAll(entry.getValue()); }); + + this.whitelists = whitelistsBuilder; } @Override diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java index 2ccc70685e6d7..1d74de8549435 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java @@ -24,7 +24,7 @@ public class AliasTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias")); contexts.put(PainlessTestScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java index e97bd1bb123ca..6d951299b80c6 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java @@ -25,7 +25,7 @@ public class AugmentationTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - List digestWhitelist = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List digestWhitelist = new ArrayList<>(PAINLESS_BASE_WHITELIST); digestWhitelist.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.script.ingest.txt")); contexts.put(DigestTestScript.CONTEXT, digestWhitelist); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java index 685080c8d90f6..1acaa83493ee2 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java @@ -28,27 +28,27 @@ public class BaseClassTests extends ScriptTestCase { protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(Gets.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(NoArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(OneArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - 
contexts.put(PrimitiveArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DefArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ManyArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(VarArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DefaultMethods.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsVoid.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveBoolean.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveInt.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveFloat.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveDouble.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(NoArgsConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(WrongArgsConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(WrongLengthOfArgConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownArgType.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownReturnType.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownArgTypeInArray.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(TwoExecuteMethods.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(Gets.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(NoArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(OneArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(PrimitiveArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(DefArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ManyArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(VarArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(DefaultMethods.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsVoid.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveBoolean.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveInt.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveFloat.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveDouble.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(NoArgsConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(WrongArgsConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(WrongLengthOfArgConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownArgType.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownReturnType.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownArgTypeInArray.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(TwoExecuteMethods.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } @@ -138,7 +138,7 @@ public void testNoArgs() throws Exception { ); assertEquals("cannot resolve symbol [_score]", e.getMessage()); - String debug = Debugger.toString(NoArgs.class, "int i = 0", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(NoArgs.class, "int i = 0", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ACONST_NULL")); assertThat(debug, containsString("ARETURN")); } @@ -377,7 +377,7 @@ public void testReturnsVoid() throws Exception { scriptEngine.compile("testReturnsVoid1", "map.remove('a')", ReturnsVoid.CONTEXT, emptyMap()).newInstance().execute(map); assertEquals(emptyMap(), map); - String debug = Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings(), 
PAINLESS_BASE_WHITELIST); // The important thing is that this contains the opcode for returning void assertThat(debug, containsString(" RETURN")); // We shouldn't contain any weird "default to null" logic @@ -434,7 +434,7 @@ public void testReturnsPrimitiveBoolean() throws Exception { .execute() ); - String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ICONST_0")); // The important thing here is that we have the bytecode for returning an integer instead of an object. booleans are integers. assertThat(debug, containsString("IRETURN")); @@ -540,7 +540,7 @@ public void testReturnsPrimitiveInt() throws Exception { scriptEngine.compile("testReturnsPrimitiveInt7", "1 + 1", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() ); - String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ICONST_1")); // The important thing here is that we have the bytecode for returning an integer instead of an object assertThat(debug, containsString("IRETURN")); @@ -656,7 +656,7 @@ public void testReturnsPrimitiveFloat() throws Exception { ).newInstance().execute() ); - String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("FCONST_1")); // The important thing here is that we have the bytecode for returning a float instead of an object assertThat(debug, containsString("FRETURN")); @@ -775,7 +775,7 @@ public void testReturnsPrimitiveDouble() throws Exception { 0 ); - String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings(), PAINLESS_BASE_WHITELIST); // The important thing here is that we have the bytecode for returning a double instead of an object assertThat(debug, containsString("DRETURN")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index e264085371701..14f30a895911b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -24,7 +24,7 @@ public class BasicStatementTests extends ScriptTestCase { protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - contexts.put(OneArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(OneArg.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java index 90c517fbdce2a..a751e9c82ec2a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java @@ -114,7 +114,7 @@ public interface Factory { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java index 7b0b3b500d12c..3d539f7e3cb85 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java @@ -28,7 +28,7 @@ public class DebugTests extends ScriptTestCase { private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists( - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, new HashMap<>(), new HashMap<>() ); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index b44be595b4178..b60d8a0fd7ce8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -22,12 +22,14 @@ import java.util.HashMap; import java.util.List; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; + /** quick and dirty tools for debugging */ final class Debugger { /** compiles source to bytecode, and returns debugging output */ static String toString(final String source) { - return toString(PainlessTestScript.class, source, new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + return toString(PainlessTestScript.class, source, new CompilerSettings(), PAINLESS_BASE_WHITELIST); } /** compiles to bytecode, and returns debugging output */ @@ -84,7 +86,7 @@ static void phases( PainlessTestScript.class, source, new CompilerSettings(), - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, semanticPhaseVisitor, irPhaseVisitor, asmPhaseVisitor diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index ed7fef33302bb..9ad29bbe84f7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -21,9 +21,11 @@ import java.util.Collections; import java.util.HashMap; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; + public class DefBootstrapTests extends ESTestCase { private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists( - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, new HashMap<>(), new HashMap<>() ); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java index ffbb7a17137d9..e7f370d4b8a7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java @@ -23,7 +23,7 @@ public class DynamicTypeTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.dynamic")); contexts.put(PainlessTestScript.CONTEXT, whitelists); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java index 344d4aaa822dc..eb1626af454b7 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java @@ -21,7 +21,7 @@ public class EmitTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); contexts.put(TestFieldScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java index eb1a665327258..8d15e0b70d299 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java @@ -23,14 +23,14 @@ public class FactoryTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - contexts.put(StatefulFactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DeterministicFactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(EmptyTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(TemplateScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(VoidReturnTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestConverterScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestConverterScriptBadDef.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(StatefulFactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(DeterministicFactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(EmptyTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(TemplateScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(VoidReturnTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestConverterScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestConverterScriptBadDef.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 33a66c7564df4..96181458bd496 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -22,6 +22,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; /** * Test that needsScores() is reported correctly depending on whether _score is used @@ -33,7 +34,7 @@ public void testNeedsScores() { IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); Map, List> contexts = new HashMap<>(); - contexts.put(NumberSortScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(NumberSortScript.CONTEXT, PAINLESS_BASE_WHITELIST); PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts); SearchExecutionContext searchExecutionContext = index.newSearchExecutionContext(0, 0, null, () -> 0, null, emptyMap()); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 7d9a54b71d5c7..dbcb4e791cdd8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -34,6 +34,9 @@ * Typically just asserts the output of {@code exec()} */ public abstract class ScriptTestCase extends ESTestCase { + + protected static final List PAINLESS_BASE_WHITELIST = PainlessPlugin.baseWhiteList(); + protected PainlessScriptEngine scriptEngine; @Before @@ -53,7 +56,7 @@ protected Settings scriptEngineSettings() { */ protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); contexts.put(PainlessTestScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 3aee371bec453..2d3f09fc7243a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -34,10 +34,10 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 6ad7622b35cac..dbfd1327fb998 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -42,8 +42,8 @@ public class SimilarityScriptTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(SimilarityScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(SimilarityWeightScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(SimilarityScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(SimilarityWeightScript.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java index 407b36caf1924..cfa62ea33c0ea 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java @@ -76,7 +76,7 @@ public interface Factory { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.this")); contexts.put(ThisScript.CONTEXT, whitelists); return contexts; From 4923f96828611b6c8372384aa13b1282bea761e6 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 29 Mar 2024 17:22:31 -0400 Subject: [PATCH 013/264] Tidy up the geoip Property enum (#106930) --- .../ingest/geoip/GeoIpProcessor.java | 124 ++++++++++-------- .../geoip/GeoIpProcessorFactoryTests.java | 17 +-- .../ingest/geoip/GeoIpProcessorTests.java | 47 +++---- 3 files changed, 101 insertions(+), 87 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 087f0ac9480f8..18ca9599f183c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Assertions; +import org.elasticsearch.core.Nullable; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; @@ -33,8 +34,8 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; -import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -381,23 +382,21 @@ public GeoIpDatabase get() throws IOException { } public static final class Factory implements Processor.Factory { - static final Set DEFAULT_CITY_PROPERTIES = Set.copyOf( - EnumSet.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.LOCATION - ) - ); - static 
final Set DEFAULT_COUNTRY_PROPERTIES = Set.copyOf( - EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE) + static final Set DEFAULT_CITY_PROPERTIES = Set.of( + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.LOCATION ); - static final Set DEFAULT_ASN_PROPERTIES = Set.copyOf( - EnumSet.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK) + static final Set DEFAULT_COUNTRY_PROPERTIES = Set.of( + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE ); + static final Set DEFAULT_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); private final GeoIpDatabaseProvider geoIpDatabaseProvider; @@ -457,27 +456,10 @@ public Processor create( } final Set properties; - if (propertyNames != null) { - Set modifiableProperties = EnumSet.noneOf(Property.class); - for (String fieldName : propertyNames) { - try { - modifiableProperties.add(Property.parseProperty(databaseType, fieldName)); - } catch (IllegalArgumentException e) { - throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); - } - } - properties = Set.copyOf(modifiableProperties); - } else { - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - properties = DEFAULT_CITY_PROPERTIES; - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - properties = DEFAULT_COUNTRY_PROPERTIES; - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - properties = DEFAULT_ASN_PROPERTIES; - } else { - assert false : "unsupported database type [" + databaseType + "]"; - properties = Set.of(); - } + try { + properties = Property.parseProperties(databaseType, propertyNames); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } return new GeoIpProcessor( processorTag, @@ -518,7 +500,7 @@ enum Property { ORGANIZATION_NAME, NETWORK; - static final EnumSet ALL_CITY_PROPERTIES = EnumSet.of( + static final Set ALL_CITY_PROPERTIES = Set.of( Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, @@ -529,42 +511,70 @@ enum Property { Property.TIMEZONE, Property.LOCATION ); - static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of( + static final Set ALL_COUNTRY_PROPERTIES = Set.of( Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE ); - static final EnumSet ALL_ASN_PROPERTIES = EnumSet.of( - Property.IP, - Property.ASN, - Property.ORGANIZATION_NAME, - Property.NETWORK - ); + static final Set ALL_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); + + private static Property parseProperty(Set validProperties, String value) { + try { + Property property = valueOf(value.toUpperCase(Locale.ROOT)); + if (validProperties.contains(property) == false) { + throw new IllegalArgumentException("invalid"); + } + return property; + } catch (IllegalArgumentException e) { + // put the properties in natural order before throwing so that we have reliable error messages -- this is a little + // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial + Property[] properties = validProperties.toArray(new Property[0]); + Arrays.sort(properties); + throw new IllegalArgumentException( + "illegal property value [" + value + "]. 
valid values are " + Arrays.toString(properties) + ); + } + } + + /** + * Parse the given list of property names and validate them against the supplied databaseType. + * + * @param databaseType the type of database to use to validate property names + * @param propertyNames a list of property names to parse, or null to use the default properties for the associated databaseType + * @throws IllegalArgumentException if any of the property names are not valid, or if the databaseType is not valid + * @return a set of parsed and validated properties + */ + public static Set parseProperties(final String databaseType, @Nullable final List propertyNames) { + final Set validProperties; + final Set defaultProperties; - public static Property parseProperty(String databaseType, String value) { - Set validProperties = EnumSet.noneOf(Property.class); if (databaseType.endsWith(CITY_DB_SUFFIX)) { validProperties = ALL_CITY_PROPERTIES; + defaultProperties = Factory.DEFAULT_CITY_PROPERTIES; } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { validProperties = ALL_COUNTRY_PROPERTIES; + defaultProperties = Factory.DEFAULT_COUNTRY_PROPERTIES; } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { validProperties = ALL_ASN_PROPERTIES; + defaultProperties = Factory.DEFAULT_ASN_PROPERTIES; } else { - assert false : "unsupported database type [" + databaseType + "]"; + assert false : "Unsupported database type [" + databaseType + "]"; + throw new IllegalArgumentException("Unsupported database type [" + databaseType + "]"); } - try { - Property property = valueOf(value.toUpperCase(Locale.ROOT)); - if (validProperties.contains(property) == false) { - throw new IllegalArgumentException("invalid"); + final Set properties; + if (propertyNames != null) { + Set modifiableProperties = new HashSet<>(); + for (String propertyName : propertyNames) { + modifiableProperties.add(parseProperty(validProperties, propertyName)); // n.b. this throws if a property is invalid } - return property; - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException( - "illegal property value [" + value + "]. 
valid values are " + Arrays.toString(validProperties.toArray()) - ); + properties = Set.copyOf(modifiableProperties); + } else { + // if propertyNames is null, then use the default properties for the databaseType + properties = defaultProperties; } + return properties; } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 20e0fa9be6c06..24f2df7e30d16 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -37,8 +38,8 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; -import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -185,8 +186,8 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); - EnumSet asnOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_ASN_PROPERTIES); - asnOnlyProperties.remove(GeoIpProcessor.Property.IP); + Set asnOnlyProperties = new HashSet<>(Property.ALL_ASN_PROPERTIES); + asnOnlyProperties.remove(Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); config.put("properties", List.of(asnProperty)); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -205,8 +206,8 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-ASN.mmdb"); - EnumSet cityOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_CITY_PROPERTIES); - cityOnlyProperties.remove(GeoIpProcessor.Property.IP); + Set cityOnlyProperties = new HashSet<>(Property.ALL_CITY_PROPERTIES); + cityOnlyProperties.remove(Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", List.of(cityProperty)); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -245,12 +246,12 @@ public void testBuildBuiltinDatabaseMissing() throws Exception { public void testBuildFields() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseNodeService); - Set properties = EnumSet.noneOf(GeoIpProcessor.Property.class); + Set properties = new HashSet<>(); List fieldNames = new ArrayList<>(); int counter = 0; - int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Property.values().length); - for (GeoIpProcessor.Property property : GeoIpProcessor.Property.ALL_CITY_PROPERTIES) { + int numFields = scaledRandomIntBetween(1, Property.values().length); + for (Property property : Property.ALL_CITY_PROPERTIES) { properties.add(property); 
fieldNames.add(property.name().toLowerCase(Locale.ROOT)); if (++counter >= numFields) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f5ad0e9c0817a..3114d24ee7571 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -14,15 +14,16 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.InputStream; import java.util.Collections; -import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; @@ -35,6 +36,8 @@ public class GeoIpProcessorTests extends ESTestCase { + private static final Set ALL_PROPERTIES = Set.of(Property.values()); + public void testCity() throws Exception { GeoIpProcessor processor = new GeoIpProcessor( randomAlphaOfLength(10), @@ -43,7 +46,7 @@ public void testCity() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -77,7 +80,7 @@ public void testNullValueWithIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "filename" @@ -99,7 +102,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "filename" @@ -118,7 +121,7 @@ public void testNullWithoutIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -140,7 +143,7 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -159,7 +162,7 @@ public void testCity_withIpV6() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -197,7 +200,7 @@ public void testCityWithMissingLocation() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -223,7 +226,7 @@ public void testCountry() throws Exception { loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -252,7 +255,7 @@ public void testCountryWithMissingLocation() throws Exception { loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -279,7 +282,7 @@ public void testAsn() throws Exception { loader("/GeoLite2-ASN.mmdb"), () -> true, "target_field", - 
EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -308,7 +311,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -332,7 +335,7 @@ public void testInvalid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -353,7 +356,7 @@ public void testListAllValid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -383,7 +386,7 @@ public void testListPartiallyValid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -413,7 +416,7 @@ public void testListNoMatches() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -433,7 +436,7 @@ public void testListDatabaseReferenceCounting() throws Exception { GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", () -> { loader.preLookup(); return loader; - }, () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename"); + }, () -> true, "target_field", ALL_PROPERTIES, false, false, "filename"); Map document = new HashMap<>(); document.put("source_field", List.of("8.8.8.8", "82.171.64.0")); @@ -464,7 +467,7 @@ public void testListFirstOnly() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -492,7 +495,7 @@ public void testListFirstOnlyNoMatches() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -514,7 +517,7 @@ public void testInvalidDatabase() throws Exception { loader("/GeoLite2-City.mmdb"), () -> false, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -537,7 +540,7 @@ public void testNoDatabase() throws Exception { () -> null, () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "GeoLite2-City" @@ -560,7 +563,7 @@ public void testNoDatabase_ignoreMissing() throws Exception { () -> null, () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "GeoLite2-City" From c6a0d4f0d775f7cee7a0bce8ec878428b42fa768 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 29 Mar 2024 16:29:16 -0500 Subject: [PATCH 014/264] Pulling KeyValueProcessor.logAndBuildException() into AbstractProcessor (#106931) --- .../ingest/common/KeyValueProcessor.java | 33 +---- .../ingest/AbstractProcessor.java | 36 +++++ .../ingest/AbstractProcessorTests.java | 134 ++++++++++++++++++ 3 files changed, 173 insertions(+), 30 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 
0c6e37f675e1d..a7b1efb52efc5 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -8,9 +8,6 @@ package org.elasticsearch.ingest.common; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.core.Predicates; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; @@ -32,8 +29,6 @@ */ public final class KeyValueProcessor extends AbstractProcessor { - private static final Logger logger = LogManager.getLogger(KeyValueProcessor.class); - public static final String TYPE = "kv"; private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)"); @@ -85,7 +80,7 @@ public final class KeyValueProcessor extends AbstractProcessor { ); } - private static Consumer buildExecution( + private Consumer buildExecution( String fieldSplit, String valueSplit, TemplateScript.Factory field, @@ -170,29 +165,7 @@ private static Consumer buildExecution( }; } - /** - * Helper method for buildTrimmer and buildSplitter. - *
<p>
- * If trace logging is enabled, then we should log the stacktrace (and so the message can be slightly simpler). - * On the other hand if trace logging isn't enabled, then we'll need to log some context on the original issue (but not a stacktrace). - *
<p>
- * Regardless of the logging level, we should throw an exception that has the context in its message, which this method builds. - */ - private static ElasticsearchException logAndBuildException(String message, Throwable error) { - String cause = error.getClass().getName(); - if (error.getMessage() != null) { - cause += ": " + error.getMessage(); - } - String longMessage = message + ": " + cause; - if (logger.isTraceEnabled()) { - logger.trace(message, error); - } else { - logger.warn(longMessage); - } - return new ElasticsearchException(longMessage); - } - - private static Function buildTrimmer(String trim) { + private Function buildTrimmer(String trim) { if (trim == null) { return val -> val; } else { @@ -207,7 +180,7 @@ private static Function buildTrimmer(String trim) { } } - private static Function buildSplitter(String split, boolean fields) { + private Function buildSplitter(String split, boolean fields) { int limit = fields ? 0 : 2; if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') { Pattern splitPattern = Pattern.compile(split); diff --git a/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java index d709e442cac1b..6fb47c57d8811 100644 --- a/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java @@ -8,6 +8,10 @@ package org.elasticsearch.ingest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; + /** * An Abstract Processor that holds tag and description information * about the processor. @@ -30,4 +34,36 @@ public String getTag() { public String getDescription() { return description; } + + /** + * Helper method to be used by processors that need to catch and log Throwables. + *
<p>
+ * If trace logging is enabled, then we log the provided message and the full stacktrace + * On the other hand if trace logging isn't enabled, then we log the provided message and the message from the Throwable (but not a + * stacktrace). + *
<p>
+ * Regardless of the logging level, we throw an ElasticsearchException that has the context in its message + * + * @param message A message to be logged and to be included in the message of the returned ElasticsearchException + * @param throwable The Throwable that has been caught + * @return A new ElasticsearchException whose message includes the passed-in message and the message from the passed-in Throwable. It + * will not however wrap the given Throwable. + */ + protected ElasticsearchException logAndBuildException(String message, Throwable throwable) { + String cause = throwable.getClass().getName(); + if (throwable.getMessage() != null) { + cause += ": " + throwable.getMessage(); + } + String longMessage = message + ": " + cause; + // This method will only be called in exceptional situations, so the cost of looking up the logger won't be bad: + Logger logger = LogManager.getLogger(getClass()); + if (logger.isTraceEnabled()) { + logger.trace(message, throwable); + } else { + logger.warn(longMessage); + } + // We don't want to wrap the Throwable here because it is probably not one of the exceptions that ElasticsearchException can + // serialize: + return new ElasticsearchException(longMessage); + } } diff --git a/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java new file mode 100644 index 0000000000000..121f6cc5f5dc6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.ingest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.spi.LoggerContextFactory; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AbstractProcessorTests extends ESTestCase { + + public void testLogAndBuildException() { + final LoggerContextFactory originalFactory = LogManager.getFactory(); + try { + final String message = randomAlphaOfLength(100); + final String throwableMessage = randomBoolean() ? 
null : randomAlphaOfLength(100); + AtomicBoolean warnCalled = new AtomicBoolean(false); + AtomicBoolean traceCalled = new AtomicBoolean(false); + final Throwable throwable = randomFrom( + new StackOverflowError(throwableMessage), + new RuntimeException(throwableMessage), + new IOException(throwableMessage) + ); + + { + // Mock logging so that we can make sure we're logging what we expect: + Logger mockLogger = mock(Logger.class); + doAnswer(invocationOnMock -> { + warnCalled.set(true); + String logMessage = invocationOnMock.getArgument(0, String.class); + assertThat(logMessage, containsString(message)); + if (throwableMessage != null) { + assertThat(logMessage, containsString(throwableMessage)); + } + return null; + }).when(mockLogger).warn(anyString()); + + doAnswer(invocationOnMock -> { + traceCalled.set(true); + String logMessage = invocationOnMock.getArgument(0, String.class); + Throwable logThrowable = invocationOnMock.getArgument(1, Throwable.class); + assertThat(logMessage, containsString(message)); + if (throwableMessage != null) { + assertThat(logMessage, not(containsString(throwableMessage))); + } + assertThat(logThrowable, equalTo(throwable)); + return null; + }).when(mockLogger).trace(anyString(), any(Throwable.class)); + + final LoggerContext context = Mockito.mock(LoggerContext.class); + when(context.getLogger(TestProcessor.class)).thenReturn(mockLogger); + + final LoggerContextFactory spy = Mockito.spy(originalFactory); + Mockito.doReturn(context).when(spy).getContext(any(), any(), any(), anyBoolean()); + LogManager.setFactory(spy); + } + + TestProcessor testProcessor = new TestProcessor(); + + { + // Run with trace logging disabled + ElasticsearchException resultException = testProcessor.logAndBuildException(message, throwable); + assertThat(resultException.getRootCause(), equalTo(resultException)); + String resultMessage = resultException.getMessage(); + assertNotNull(resultMessage); + if (throwableMessage != null) { + assertThat(resultMessage, containsString(throwableMessage)); + } + assertThat(resultMessage, containsString(message)); + + assertThat("log.warn not called", warnCalled.get(), is(true)); + assertThat("log.trace called", traceCalled.get(), is(false)); + } + + // reset between tests: + warnCalled.set(false); + traceCalled.set(false); + + { + // Now enable trace logging + when(LogManager.getLogger(TestProcessor.class).isTraceEnabled()).thenReturn(true); + ElasticsearchException resultException = testProcessor.logAndBuildException(message, throwable); + assertThat(resultException.getRootCause(), equalTo(resultException)); + String resultMessage = resultException.getMessage(); + assertNotNull(resultMessage); + if (throwableMessage != null) { + assertThat(resultMessage, containsString(throwableMessage)); + } + assertThat(resultMessage, containsString(message)); + + assertThat("log.warn called", warnCalled.get(), is(false)); + assertThat("log.trace not called", traceCalled.get(), is(true)); + } + } finally { + LogManager.setFactory(originalFactory); + } + } + + class TestProcessor extends AbstractProcessor { + + protected TestProcessor() { + super("", ""); + + } + + @Override + public String getType() { + return "test"; + } + } +} From ee2733fa4f5f319c534dba80aa615ae8735ec4b2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 Mar 2024 14:57:02 -0700 Subject: [PATCH 015/264] Fix MRJAR test tasks when runtime java is set (#106929) The MRJAR test tasks explicitly set the java toolchain to use for execution. 
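For context, that wiring looks roughly like the sketch below — `pinTestJdk` is a hypothetical helper name, not a method from the plugin, but the toolchain calls mirror the diff that follows:

    // Sketch (Gradle plugin code): pinning the JVM a Test task runs on via the toolchain API.
    // `javaToolchains` is an injected org.gradle.jvm.toolchain.JavaToolchainService.
    private void pinTestJdk(Test testTask, JavaToolchainService javaToolchains, int javaVersion) {
        testTask.getJavaLauncher()
            .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion))));
    }
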
This is incompatible with setting the test executable directly, which is what happens when runtime java is set. This commit guards setting the toolchain to only occur when runtime java isn't set, and adds an onlyIf to ensure the test is runnable when runtime java is set. relates #106828 --- .../gradle/internal/MrjarPlugin.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 9e2f44323f914..6524247c4c8f6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -8,8 +8,10 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.util.GradleUtils; +import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.file.FileCollection; @@ -151,8 +153,18 @@ private void createTestTask(Project project, SourceSet sourceSet, int javaVersio testTask.setClasspath(testRuntime.plus(project.files(jarTask))); testTask.setTestClassesDirs(sourceSet.getOutput().getClassesDirs()); - testTask.getJavaLauncher() - .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + // only set the jdk if runtime java isn't set because setting the toolchain is incompatible with + // runtime java setting the executable directly + if (BuildParams.getIsRuntimeJavaHomeSet()) { + testTask.onlyIf("runtime java must support java " + javaVersion, t -> { + JavaVersion runtimeJavaVersion = BuildParams.getRuntimeJavaVersion(); + return runtimeJavaVersion.isCompatibleWith(JavaVersion.toVersion(javaVersion)); + }); + } else { + testTask.getJavaLauncher() + .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + } + }); project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); From d125f528136c0c4432046ba9cd56df099e77c365 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 29 Mar 2024 18:35:52 -0700 Subject: [PATCH 016/264] AwaitsFix #106933 --- .../java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 184343349d317..695f5d2a64bc7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.hamcrest.Matchers; @@ -20,6 +21,7 @@ import static org.hamcrest.CoreMatchers.equalTo; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106933") public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int 
upgradedNodes) { From 7af3c8db016bc97d45cabbcd1dd7d2ec30cc166e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Sat, 30 Mar 2024 11:13:05 -0700 Subject: [PATCH 017/264] AwaitsFix #106871 --- .../org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 8c06ccf750d36..1f4830d8b6d0c 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -176,6 +176,7 @@ public void testWithUsers() throws Exception { * the testWithUsers test is generally testing). * @throws IOException */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106871") public void testStatusWithUsersWhileSearchIsRunning() throws IOException { String user = randomFrom("user1", "user2"); String other = user.equals("user1") ? "user2" : "user1"; From 7f83189cb24bf41ddea13126385c47302ea9f9b3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sat, 30 Mar 2024 19:23:27 +0000 Subject: [PATCH 018/264] Remove executor lookups from `TransportWriteAction` (#106938) Replaces the `String` names (plus associated threadpool) with proper `Executor` instances. Relates #106279 (removes a couple more usages of `SAME`) Relates #105460, #99787, #97879 etc. --- .../action/bulk/TransportShardBulkAction.java | 15 +++++-------- .../TransportResyncReplicationAction.java | 2 +- .../replication/TransportWriteAction.java | 11 +++++----- .../index/seqno/RetentionLeaseSyncAction.java | 21 ++++++++++++++++++- .../indices/ExecutorSelector.java | 13 ++++++------ .../bulk/TransportShardBulkActionTests.java | 18 ++++++---------- .../TransportWriteActionTests.java | 5 +++-- .../ESIndexLevelReplicationTestCase.java | 3 +-- .../TransportBulkShardOperationsAction.java | 2 +- .../authz/AuthorizationServiceTests.java | 3 +-- 10 files changed, 51 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 5c1f0e4aa7306..4cf10b3c27824 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -115,7 +115,7 @@ public TransportShardBulkAction( actionFilters, BulkShardRequest::new, BulkShardRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), false, indexingPressure, systemIndices @@ -167,7 +167,7 @@ public void onClusterServiceClose() { public void onTimeout(TimeValue timeout) { mappingUpdateListener.onFailure(new MapperException("timed out while waiting for a dynamic mapping update")); } - }), listener, threadPool, executor(primary), postWriteRefresh, postWriteAction, documentParsingProvider); + }), listener, executor(primary), postWriteRefresh, postWriteAction, documentParsingProvider); } @Override @@ -188,8 +188,7 @@ public static void performOnPrimary( MappingUpdatePerformer mappingUpdater, Consumer> waitForMappingUpdate, ActionListener> listener, - ThreadPool threadPool, - String executorName + Executor executor ) { 
performOnPrimary( request, @@ -199,8 +198,7 @@ public static void performOnPrimary( mappingUpdater, waitForMappingUpdate, listener, - threadPool, - executorName, + executor, null, null, DocumentParsingProvider.EMPTY_INSTANCE @@ -215,16 +213,13 @@ public static void performOnPrimary( MappingUpdatePerformer mappingUpdater, Consumer> waitForMappingUpdate, ActionListener> listener, - ThreadPool threadPool, - String executorName, + Executor executor, @Nullable PostWriteRefresh postWriteRefresh, @Nullable Consumer postWriteAction, DocumentParsingProvider documentParsingProvider ) { new ActionRunnable<>(listener) { - private final Executor executor = threadPool.executor(executorName); - private final BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(request, primary); final long startBulkTime = System.nanoTime(); diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 9a08da40ca282..4684c990299f9 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -70,7 +70,7 @@ public TransportResyncReplicationAction( actionFilters, ResyncReplicationRequest::new, ResyncReplicationRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), true, /* we should never reject resync because of thread pool capacity on primary */ indexingPressure, systemIndices diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index ea24d7deb9aa7..8994b428adcbe 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -61,7 +62,7 @@ public abstract class TransportWriteAction< protected final ExecutorSelector executorSelector; protected final PostWriteRefresh postWriteRefresh; - private final BiFunction executorFunction; + private final BiFunction executorFunction; protected TransportWriteAction( Settings settings, @@ -74,7 +75,7 @@ protected TransportWriteAction( ActionFilters actionFilters, Writeable.Reader request, Writeable.Reader replicaRequest, - BiFunction executorFunction, + BiFunction executorFunction, boolean forceExecutionOnPrimary, IndexingPressure indexingPressure, SystemIndices systemIndices @@ -103,7 +104,7 @@ protected TransportWriteAction( this.postWriteRefresh = new PostWriteRefresh(transportService); } - protected String executor(IndexShard shard) { + protected Executor executor(IndexShard shard) { return executorFunction.apply(executorSelector, shard); } @@ -210,7 +211,7 @@ protected void shardOperationOnPrimary( IndexShard primary, ActionListener> listener ) { - threadPool.executor(executorFunction.apply(executorSelector, primary)).execute(new ActionRunnable<>(listener) { + executorFunction.apply(executorSelector, primary).execute(new 
ActionRunnable<>(listener) { @Override protected void doRun() { dispatchedShardOperationOnPrimary(request, primary, listener); @@ -238,7 +239,7 @@ protected abstract void dispatchedShardOperationOnPrimary( */ @Override protected void shardOperationOnReplica(ReplicaRequest request, IndexShard replica, ActionListener listener) { - threadPool.executor(executorFunction.apply(executorSelector, replica)).execute(new ActionRunnable<>(listener) { + executorFunction.apply(executorSelector, replica).execute(new ActionRunnable<>(listener) { @Override protected void doRun() { dispatchedShardOperationOnReplica(request, replica, listener); diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java index d03a29922da07..d69dbc00ff7e6 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.shard.IndexShardClosedException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotInPrimaryModeException; +import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.node.NodeClosedException; @@ -50,6 +51,7 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.Executor; +import java.util.function.BiFunction; import static org.elasticsearch.core.Strings.format; @@ -88,7 +90,7 @@ public RetentionLeaseSyncAction( actionFilters, RetentionLeaseSyncAction.Request::new, RetentionLeaseSyncAction.Request::new, - (service, ignore) -> ThreadPool.Names.MANAGEMENT, + new ManagementOnlyExecutorFunction(threadPool), false, indexingPressure, systemIndices @@ -263,4 +265,21 @@ protected Response newResponseInstance(StreamInput in) throws IOException { return new Response(in); } + /** + * A {@code BiFunction} for passing to the super constructor which always returns the + * MANAGEMENT executor (but looks it up once at construction time and caches the result, unlike how the obvious lambda would work). + */ + private static class ManagementOnlyExecutorFunction implements BiFunction { + private final Executor executor; + + ManagementOnlyExecutorFunction(ThreadPool threadPool) { + executor = threadPool.executor(ThreadPool.Names.MANAGEMENT); + } + + @Override + public Executor apply(ExecutorSelector executorSelector, IndexShard indexShard) { + return executor; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java b/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java index a92a451e58eec..d5969702ba402 100644 --- a/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java +++ b/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java @@ -12,6 +12,8 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Objects; +import java.util.concurrent.Executor; +import java.util.function.BiFunction; /** * Some operations need to use different executors for different index patterns. @@ -93,12 +95,11 @@ public String executorForWrite(String indexName) { /** * This is a convenience method for the case when we need to find an executor for a shard. - * Note that it can be passed to methods as a {@link java.util.function.BiFunction}. - * @param executorSelector An executor selector service. 
- * @param shard A shard for which we need to find an executor. - * @return Name of the executor that should be used for write operations on this shard. + * @return a {@link java.util.function.BiFunction} which returns the executor that should be used for write operations on this shard. */ - public static String getWriteExecutorForShard(ExecutorSelector executorSelector, IndexShard shard) { - return executorSelector.executorForWrite(shard.shardId().getIndexName()); + public static BiFunction getWriteExecutorForShard(ThreadPool threadPool) { + return (executorSelector, indexShard) -> threadPool.executor( + executorSelector.executorForWrite(indexShard.shardId().getIndexName()) + ); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 4660e1e4ea97d..31e1a66c8ca44 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -241,8 +241,7 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { throw new AssertionError(e); } }), latch::countDown), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); @@ -936,8 +935,7 @@ public void testRetries() throws Exception { assertThat(response.status(), equalTo(RestStatus.CREATED)); assertThat(response.getSeqNo(), equalTo(13L)); }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); } @@ -1026,8 +1024,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception { new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> // Assert that we still need to fsync the location that was successfully written assertThat(((WritePrimaryResult) result).location, equalTo(resultLocation1))), latch), - rejectingThreadPool, - Names.WRITE + rejectingThreadPool.executor(Names.WRITE) ); latch.await(); @@ -1098,8 +1095,7 @@ public void testPerformOnPrimaryReportsBulkStats() throws Exception { closeShards(shard); } }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); @@ -1148,8 +1144,7 @@ public void testNoopMappingUpdateInfiniteLoopPrevention() throws Exception { (update, shardId, listener) -> fail("the master should not be contacted as the operation yielded a noop mapping update"), listener -> listener.onResponse(null), ActionTestUtils.assertNoFailureListener(result -> {}), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ) ); assertThat( @@ -1219,8 +1214,7 @@ public void testNoopMappingUpdateSuccessOnRetry() throws Exception { BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse(); assertFalse(primaryResponse.isFailed()); }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index f4c8e2baa94cf..5530ec61fea33 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; 
import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Strings; import org.elasticsearch.index.Index; @@ -430,7 +431,7 @@ protected TestAction(boolean withDocumentFailureOnPrimary, boolean withDocumentF new ActionFilters(new HashSet<>()), TestRequest::new, TestRequest::new, - (service, ignore) -> ThreadPool.Names.SAME, + (service, ignore) -> EsExecutors.DIRECT_EXECUTOR_SERVICE, false, new IndexingPressure(Settings.EMPTY), EmptySystemIndices.INSTANCE @@ -458,7 +459,7 @@ protected TestAction( new ActionFilters(new HashSet<>()), TestRequest::new, TestRequest::new, - (service, ignore) -> ThreadPool.Names.SAME, + (service, ignore) -> EsExecutors.DIRECT_EXECUTOR_SERVICE, false, new IndexingPressure(settings), EmptySystemIndices.INSTANCE diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index ba6d7e441ef4a..6157d6997641d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -895,8 +895,7 @@ private void executeShardBulkOnPrimary( ); listener.onResponse((TransportWriteAction.WritePrimaryResult) result); }), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java index 3e2f5710c1507..d9592c3df4950 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java @@ -66,7 +66,7 @@ public TransportBulkShardOperationsAction( actionFilters, BulkShardOperationsRequest::new, BulkShardOperationsRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), false, indexingPressure, systemIndices diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 2cc6c7d569f44..4330dc3171047 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -1583,8 +1583,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { mappingUpdater, waitForMappingUpdate, future, - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); TransportReplicationAction.PrimaryResult result = future.get(); From ddd7c5352ec4cfaf07748a540f6e249e89a982a1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sat, 30 Mar 2024 19:33:22 +0000 Subject: [PATCH 019/264] Remove executor lookups from `TransportInstanceSingleOperationAction` (#106940) Replaces the `String` names (plus associated threadpool) with proper `Executor` instances. 
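For illustration, a minimal sketch of the before/after shape of this refactoring (simplified; the surrounding action class, `threadPool` field, and request are assumed, not taken verbatim from the patch):

    // Before: the action exposes a String name and every dispatch resolves it.
    protected String executor(ShardId shardId) {
        return ThreadPool.Names.WRITE;
    }
    // each call site pays for a name-to-executor lookup:
    threadPool.executor(executor(request.shardId)).execute(runnable);

    // After: the action exposes the Executor directly.
    protected Executor executor(ShardId shardId) {
        return threadPool.executor(ThreadPool.Names.WRITE);
    }
    executor(request.shardId).execute(runnable);

The lookup by name still happens, but in one well-defined place inside the action rather than being repeated at every call site.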
Relates #106279 (removes another usage of `SAME`) Relates #106938, #105460, #99787, #97879 etc. --- .../TransportInstanceSingleOperationAction.java | 8 +++++--- .../action/update/TransportUpdateAction.java | 10 +++++----- .../TransportInstanceSingleOperationActionTests.java | 6 ++++-- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 80b7a95bbe0de..fb62f0a2b1d61 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.concurrent.Executor; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; @@ -81,7 +82,7 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncSingleAction(request, listener).start(); } - protected abstract String executor(ShardId shardId); + protected abstract Executor executor(ShardId shardId); protected abstract void shardOperation(Request request, ActionListener listener); @@ -259,7 +260,8 @@ public void onTimeout(TimeValue timeout) { } private void handleShardRequest(Request request, TransportChannel channel, Task task) { - threadPool.executor(executor(request.shardId)) - .execute(ActionRunnable.wrap(new ChannelActionListener(channel), l -> shardOperation(request, l))); + executor(request.shardId).execute( + ActionRunnable.wrap(new ChannelActionListener(channel), l -> shardOperation(request, l)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 63ae56bfbd047..b899d68107975 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -51,7 +51,7 @@ import java.io.IOException; import java.util.Map; -import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executor; import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.action.bulk.TransportBulkAction.unwrappingSingleItemBulkResponse; @@ -88,9 +88,9 @@ public TransportUpdateAction( } @Override - protected String executor(ShardId shardId) { + protected Executor executor(ShardId shardId) { final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); - return indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE; + return threadPool.executor(indexService.getIndexSettings().getIndexMetadata().isSystem() ? 
Names.SYSTEM_WRITE : Names.WRITE); } @Override @@ -321,9 +321,9 @@ private void handleUpdateFailureWithRetry( request.id() ); - final ExecutorService executor; + final Executor executor; try { - executor = threadPool.executor(executor(request.getShardId())); + executor = executor(request.getShardId()); } catch (Exception e) { // might fail if shard no longer exists locally, in which case we cannot retry e.addSuppressed(versionConflictEngineException); diff --git a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 4c29b10575fae..b071e26a1fecd 100644 --- a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; @@ -52,6 +53,7 @@ import java.util.HashSet; import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -114,8 +116,8 @@ public Map getResults() { } @Override - protected String executor(ShardId shardId) { - return ThreadPool.Names.SAME; + protected Executor executor(ShardId shardId) { + return EsExecutors.DIRECT_EXECUTOR_SERVICE; } @Override From b676d380503e3336160927a35ee19d1f4efe96e4 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Sun, 31 Mar 2024 00:17:20 +0100 Subject: [PATCH 020/264] Update mixed cluster test skip version for downsampling (#106942) All nodes in the mixed cluster need to be on at least version 8.10, since PR #97557 introduced execution of downsampling tasks using the persistent task framework, which is incompatible with how execution was coordinated before.
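For illustration, the version gate in the mixed-cluster build boils down to a predicate like this sketch (a Java rendering of the Groovy closure changed below; the `Version` helper names are assumptions):

    // Sketch: only exercise BWC versions that already run downsampling
    // through the persistent task framework (8.10.0 and later).
    boolean supportedVersion(Version bwcVersion) {
        return bwcVersion.onOrAfter(Version.fromString("8.10.0"));
    }

The corresponding YAML tests carry a matching skip for versions up to 8.9.99.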
--- x-pack/plugin/downsample/qa/mixed-cluster/build.gradle | 2 +- .../resources/rest-api-spec/test/downsample/10_basic.yml | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index 2449991a8e1e0..61aa2927e46de 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -26,7 +26,7 @@ restResources { } def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter("8.8.0"); + return bwcVersion.onOrAfter("8.10.0"); } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 95c69efa5b36d..2362f21d77d86 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 8.4.99" - reason: "rollup renamed to downsample in 8.5.0" + version: " - 8.9.99" + reason: "Downsampling executed using persistent task framework from version 8.10" - do: indices.create: @@ -87,6 +87,9 @@ setup: --- "Downsample index": + - skip: + version: " - 8.9.99" + reason: "Downsampling executed using persistent task framework from version 8.10" - do: indices.downsample: From 60d5083b5f1c632f01ddc0fc7271eebd52666f6e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sun, 31 Mar 2024 21:49:30 +0200 Subject: [PATCH 021/264] Remove some more ActionType subclasses (#106947) Cleaning up a couple more of these from the server module. 
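For illustration, a simplified sketch of the pattern (class bodies elided): the `NAME`/`TYPE` constants move onto the transport action class, so the one-line `ActionType` subclass can be deleted.

    // Before: a subclass exists only to hold a singleton and a name.
    public class CancelTasksAction extends ActionType<ListTasksResponse> {
        public static final CancelTasksAction INSTANCE = new CancelTasksAction();
        public static final String NAME = "cluster:admin/tasks/cancel";
        private CancelTasksAction() {
            super(NAME);
        }
    }

    // After: a plain ActionType constant declared on the transport action.
    public static final String NAME = "cluster:admin/tasks/cancel";
    public static final ActionType<ListTasksResponse> TYPE = new ActionType<>(NAME);

    // Registration and call sites switch accordingly:
    actions.register(TransportCancelTasksAction.TYPE, TransportCancelTasksAction.class);

CancelTasksAction is one of the subclasses actually removed in the diff below; the same mechanical change applies to the others.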
--- .../datastreams/TSDBIndexingIT.java | 6 +- .../lifecycle/DataStreamLifecycleService.java | 4 +- .../system/indices/SystemIndicesQA.java | 2 +- .../action/IndicesRequestIT.java | 24 +- .../admin/cluster/node/tasks/TasksIT.java | 28 +-- .../diskusage/IndexDiskUsageAnalyzerIT.java | 41 ++-- .../indices/recovery/IndexRecoveryIT.java | 4 +- .../elasticsearch/action/ActionModule.java | 24 +- .../node/tasks/cancel/CancelTasksAction.java | 25 --- .../cancel/CancelTasksRequestBuilder.java | 2 +- .../cancel/TransportCancelTasksAction.java | 7 +- .../cluster/node/tasks/get/GetTaskAction.java | 25 --- .../node/tasks/get/GetTaskRequestBuilder.java | 2 +- .../tasks/get/TransportGetTaskAction.java | 8 +- .../cleanup/CleanupRepositoryAction.java | 20 -- .../CleanupRepositoryRequestBuilder.java | 2 +- .../TransportCleanupRepositoryAction.java | 4 +- .../cache/clear/ClearIndicesCacheAction.java | 22 -- .../ClearIndicesCacheRequestBuilder.java | 2 +- .../TransportClearIndicesCacheAction.java | 4 +- .../indices/create/CreateIndexAction.java | 22 -- .../create/CreateIndexRequestBuilder.java | 4 +- .../create/TransportCreateIndexAction.java | 4 +- .../AnalyzeIndexDiskUsageAction.java | 20 -- .../AnalyzeIndexDiskUsageRequest.java | 2 +- .../TransportAnalyzeIndexDiskUsageAction.java | 4 +- .../indices/readonly/AddIndexBlockAction.java | 21 -- .../readonly/AddIndexBlockRequestBuilder.java | 2 +- .../TransportAddIndexBlockAction.java | 4 +- .../TransportVerifyShardIndexBlockAction.java | 5 +- .../elasticsearch/action/bulk/BulkAction.java | 22 -- .../action/bulk/BulkRequestBuilder.java | 2 +- .../action/bulk/TransportBulkAction.java | 4 +- .../action/bulk/TransportShardBulkAction.java | 2 +- .../action/search/SearchTransportService.java | 4 +- .../internal/support/AbstractClient.java | 38 ++-- .../action/RestCancellableNodeClient.java | 2 +- .../RestAnalyzeIndexDiskUsageAction.java | 4 +- .../tasks/TaskResultsService.java | 2 +- .../node/tasks/CancellableTasksTests.java | 4 +- .../cluster/node/tasks/TestTaskPlugin.java | 2 +- .../bulk/TransportBulkActionIngestTests.java | 8 +- .../AbstractClientHeadersTestCase.java | 17 +- .../RestCancellableNodeClientTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- .../xpack/core/ilm/ReadOnlyStep.java | 4 +- .../privilege/ClusterPrivilegeResolver.java | 7 +- .../authz/privilege/IndexPrivilege.java | 4 +- .../core/security/user/InternalUsers.java | 6 +- .../notifications/AbstractAuditorTests.java | 4 +- .../authz/permission/LimitedRoleTests.java | 28 +-- .../authz/privilege/PrivilegeTests.java | 6 +- .../authz/store/ReservedRolesStoreTests.java | 209 +++++++++++++----- .../security/user/InternalUsersTests.java | 38 ++-- .../xpack/enrich/BasicEnrichTests.java | 4 +- .../enrich/EnrichPolicyExecutorTests.java | 4 +- .../xpack/enrich/EnrichPolicyRunnerTests.java | 4 +- .../action/CrossClustersCancellationIT.java | 4 +- .../xpack/esql/action/EsqlActionTaskIT.java | 6 +- .../ilm/history/ILMHistoryStoreTests.java | 12 +- .../integration/ModelSnapshotRetentionIT.java | 4 +- .../ml/integration/ModelSnapshotSearchIT.java | 4 +- .../ml/integration/AnnotationIndexIT.java | 4 +- .../action/TransportDeleteFilterAction.java | 36 +-- .../ml/action/TransportDeleteJobAction.java | 4 +- .../TransportPostCalendarEventsAction.java | 4 +- .../ml/action/TransportResetJobAction.java | 4 +- .../TransportRevertModelSnapshotAction.java | 4 +- .../TransportUpdateModelSnapshotAction.java | 26 ++- .../xpack/ml/dataframe/DestinationIndex.java | 4 +- .../persistence/TrainedModelProvider.java 
| 4 +- .../job/persistence/JobResultsProvider.java | 4 +- .../persistence/ResultsPersisterService.java | 4 +- ...ransportDeleteTrainedModelActionTests.java | 8 +- .../annotations/AnnotationPersisterTests.java | 22 +- .../xpack/ml/datafeed/DatafeedJobTests.java | 8 +- .../ml/dataframe/DestinationIndexTests.java | 4 +- .../TrainedModelProviderTests.java | 4 +- .../persistence/JobResultsPersisterTests.java | 18 +- .../ResultsPersisterServiceTests.java | 22 +- .../profiling/ProfilingIndexManagerTests.java | 8 +- .../action/TransportPutRollupJobAction.java | 4 +- .../xpack/rollup/job/RollupJobTask.java | 4 +- .../action/PutJobStateMachineTests.java | 10 +- .../BlobStoreCacheMaintenanceService.java | 4 +- .../authc/apikey/ApiKeySingleNodeTests.java | 4 +- .../security/authz/IndexAliasesTests.java | 4 +- .../security/authz/WriteActionsTests.java | 20 +- .../xpack/security/authc/ApiKeyService.java | 3 +- .../IndexServiceAccountTokenStore.java | 3 +- .../security/authz/AuthorizationUtils.java | 2 +- .../xpack/security/authz/RBACEngine.java | 4 +- .../security/profile/ProfileService.java | 3 +- ...sportSamlInvalidateSessionActionTests.java | 4 +- .../security/authc/ApiKeyServiceTests.java | 6 +- .../service/ElasticServiceAccountsTests.java | 32 +-- .../authz/AuthorizationServiceTests.java | 39 ++-- .../authz/AuthorizationUtilsTests.java | 2 +- .../authz/store/CompositeRolesStoreTests.java | 5 +- .../security/profile/ProfileServiceTests.java | 4 +- .../history/SnapshotHistoryStoreTests.java | 4 +- .../xpack/spatial/SpatialDiskUsageIT.java | 4 +- .../transform/persistence/TransformIndex.java | 4 +- .../transforms/ClientTransformIndexer.java | 4 +- .../persistence/TransformIndexTests.java | 4 +- .../execution/TriggeredWatchStoreTests.java | 4 +- 106 files changed, 569 insertions(+), 620 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 772cc0f98d757..24c373df72144 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -8,8 +8,8 @@ package org.elasticsearch.datastreams; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; +import 
org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -468,7 +468,7 @@ public void testTrimId() throws Exception { // Pre check whether _id stored field uses diskspace: var diskUsageResponse = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { dataStreamName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); var map = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(diskUsageResponse), false); @@ -510,7 +510,7 @@ public void testTrimId() throws Exception { // Check the _id stored field uses no disk space: diskUsageResponse = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { dataStreamName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); map = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(diskUsageResponse), false); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index d1dd008e27977..52753f00a39c1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -19,9 +19,9 @@ import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -738,7 +738,7 @@ private void addIndexBlockOnce(String indexName) { transportActionsDeduplicator.executeOnce( addIndexBlockRequest, new ErrorRecordingActionListener( - AddIndexBlockAction.NAME, + TransportAddIndexBlockAction.TYPE.name(), indexName, errorStore, Strings.format("Data stream lifecycle service encountered an error trying to mark index [%s] as readonly", indexName), diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index b86aefb12a956..9fc256e79873e 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -40,7 +40,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static 
org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 3b34cedcd3635..920677e8c4b4a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; +import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -42,8 +42,8 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.explain.TransportExplainAction; @@ -206,7 +206,7 @@ public void testAnalyze() { } public void testIndex() { - String[] indexShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] indexShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(indexShardActions); IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias()).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); @@ -217,7 +217,7 @@ public void testIndex() { } public void testDelete() { - String[] deleteShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] deleteShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(deleteShardActions); DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias()).id("id"); @@ -231,8 +231,8 @@ public void testUpdate() { // update action goes to the primary, index op gets executed locally, then replicated String[] updateShardActions = new String[] { TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -249,8 +249,8 @@ public void testUpdateUpsert() { // update action goes to the primary, index op gets executed locally, then replicated String[] updateShardActions = new String[] { 
TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -267,8 +267,8 @@ public void testUpdateDelete() { // update action goes to the primary, delete op gets executed locally, then replicated String[] updateShardActions = new String[] { TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -284,7 +284,7 @@ public void testUpdateDelete() { } public void testBulk() { - String[] bulkShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] bulkShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(bulkShardActions); List indicesOrAliases = new ArrayList<>(); @@ -427,7 +427,7 @@ public void testRefresh() { } public void testClearCache() { - String clearCacheAction = ClearIndicesCacheAction.NAME + "[n]"; + String clearCacheAction = TransportClearIndicesCacheAction.TYPE.name() + "[n]"; interceptTransportActions(clearCacheAction); ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(randomIndicesOrAliases()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 0766b732099c4..8011be1d69a04 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.search.TransportSearchAction; @@ -297,10 +297,10 @@ public void testTransportBroadcastReplicationTasks() { } public void testTransportBulkTasks() { - registerTaskManagerListeners(BulkAction.NAME); // main task - registerTaskManagerListeners(BulkAction.NAME + "[s]"); // shard task - registerTaskManagerListeners(BulkAction.NAME + "[s][p]"); // shard task on primary - registerTaskManagerListeners(BulkAction.NAME + "[s][r]"); // shard task on replica + registerTaskManagerListeners(TransportBulkAction.NAME); // main task + registerTaskManagerListeners(TransportBulkAction.NAME + "[s]"); // shard task + registerTaskManagerListeners(TransportBulkAction.NAME + "[s][p]"); // shard task on primary + registerTaskManagerListeners(TransportBulkAction.NAME + "[s][r]"); // shard task on replica createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks // ensures the mapping is available on all nodes so we won't retry the request (in case replicas don't have the right mapping). 
@@ -308,13 +308,13 @@ public void testTransportBulkTasks() { client().prepareBulk().add(prepareIndex("test").setId("test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)).get(); // the bulk operation should produce one main task - List topTask = findEvents(BulkAction.NAME, Tuple::v1); + List topTask = findEvents(TransportBulkAction.NAME, Tuple::v1); assertEquals(1, topTask.size()); assertEquals("requests[1], indices[test]", topTask.get(0).description()); // we should also get 1 or 2 [s] operation with main operation as a parent // in case the primary is located on the coordinating node we will have 1 operation, otherwise - 2 - List shardTasks = findEvents(BulkAction.NAME + "[s]", Tuple::v1); + List shardTasks = findEvents(TransportBulkAction.NAME + "[s]", Tuple::v1); assertThat(shardTasks.size(), allOf(lessThanOrEqualTo(2), greaterThanOrEqualTo(1))); // Select the effective shard task @@ -323,30 +323,30 @@ public void testTransportBulkTasks() { // we have only one task - it's going to be the parent task for all [s][p] and [s][r] tasks shardTask = shardTasks.get(0); // and it should have the main task as a parent - assertParentTask(shardTask, findEvents(BulkAction.NAME, Tuple::v1).get(0)); + assertParentTask(shardTask, findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } else { if (shardTasks.get(0).parentTaskId().equals(shardTasks.get(1).taskId())) { // task 1 is the parent of task 0, that means that task 0 will control [s][p] and [s][r] tasks shardTask = shardTasks.get(0); // in turn the parent of the task 1 should be the main task - assertParentTask(shardTasks.get(1), findEvents(BulkAction.NAME, Tuple::v1).get(0)); + assertParentTask(shardTasks.get(1), findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } else { // otherwise task 1 will control [s][p] and [s][r] tasks shardTask = shardTasks.get(1); // in turn the parent of the task 0 should be the main task - assertParentTask(shardTasks.get(0), findEvents(BulkAction.NAME, Tuple::v1).get(0)); + assertParentTask(shardTasks.get(0), findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } } assertThat(shardTask.description(), startsWith("requests[1], index[test][")); // we should also get one [s][p] operation with shard operation as a parent - assertEquals(1, numberOfEvents(BulkAction.NAME + "[s][p]", Tuple::v1)); - assertParentTask(findEvents(BulkAction.NAME + "[s][p]", Tuple::v1), shardTask); + assertEquals(1, numberOfEvents(TransportBulkAction.NAME + "[s][p]", Tuple::v1)); + assertParentTask(findEvents(TransportBulkAction.NAME + "[s][p]", Tuple::v1), shardTask); // we should get as many [s][r] operations as we have replica shards // they all should have the same shard task as a parent - assertEquals(getNumShards("test").numReplicas, numberOfEvents(BulkAction.NAME + "[s][r]", Tuple::v1)); - assertParentTask(findEvents(BulkAction.NAME + "[s][r]", Tuple::v1), shardTask); + assertEquals(getNumShards("test").numReplicas, numberOfEvents(TransportBulkAction.NAME + "[s][r]", Tuple::v1)); + assertParentTask(findEvents(TransportBulkAction.NAME + "[s][r]", Tuple::v1), shardTask); } public void testSearchTaskDescriptions() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java index a0d437d8baa73..235d1592cf7c7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java @@ -129,7 +129,7 @@ public void testSimple() throws Exception { client().admin().indices().prepareForceMerge(index).setMaxNumSegments(1).get(); PlainActionFuture future = new PlainActionFuture<>(); client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { index }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true), future ); @@ -176,7 +176,7 @@ public void testFailOnFlush() throws Exception { ); failOnFlushShards.addAll(failedShards); AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { indexName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(resp.getTotalShards(), equalTo(numberOfShards)); @@ -208,7 +208,7 @@ public void testManyShards() throws Exception { } AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { "index_*" }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(Arrays.toString(resp.getShardFailures()), resp.getShardFailures(), emptyArray()); @@ -249,25 +249,28 @@ public void testFailingTargetShards() throws Exception { try { for (String node : internalCluster().getNodeNames()) { MockTransportService.getInstance(node) - .addRequestHandlingBehavior(AnalyzeIndexDiskUsageAction.NAME + "[s]", (handler, request, channel, task) -> { - AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); - logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); - ShardId shardId = shardRequest.shardId(); - if (failingShards.contains(shardId)) { - IndexShard indexShard = indicesService.getShardOrNull(shardId); - assertNotNull("No shard found for shard " + shardId, indexShard); - logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); - indexShard.close("test", randomBoolean()); - failedShards.incrementAndGet(); - } else { - successfulShards.incrementAndGet(); + .addRequestHandlingBehavior( + TransportAnalyzeIndexDiskUsageAction.TYPE.name() + "[s]", + (handler, request, channel, task) -> { + AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); + ShardId shardId = shardRequest.shardId(); + if (failingShards.contains(shardId)) { + IndexShard indexShard = indicesService.getShardOrNull(shardId); + assertNotNull("No shard found for shard " + shardId, indexShard); + logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); + indexShard.close("test", randomBoolean()); + failedShards.incrementAndGet(); + } else { + successfulShards.incrementAndGet(); + } + handler.messageReceived(request, channel, task); } - handler.messageReceived(request, channel, task); - }); + ); } AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { 
indexName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(failedShards.get(), equalTo(failingShards.size())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 40982516725b7..23963fe50aa44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -35,7 +35,7 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActiveShardCount; @@ -1936,7 +1936,7 @@ public void accept(long globalCheckpoint, Exception e) { // delay the delivery of the replica write until the end of the test so the replica never becomes in-sync replicaNodeTransportService.addRequestHandlingBehavior( - BulkAction.NAME + "[s][r]", + TransportBulkAction.NAME + "[s][r]", (handler, request, channel, task) -> recoveryCompleteListener.addListener( assertNoFailureListener(ignored -> handler.messageReceived(request, channel, task)) ) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index a8f26ab966646..7e03b495438d8 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -36,15 +36,12 @@ import org.elasticsearch.action.admin.cluster.node.shutdown.TransportPrevalidateNodeRemovalAction; import org.elasticsearch.action.admin.cluster.node.shutdown.TransportPrevalidateShardPathAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.node.usage.TransportNodesUsageAction; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; -import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; @@ -97,19 +94,16 @@ import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; -import 
org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.close.TransportVerifyShardBeforeCloseAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.dangling.delete.TransportDeleteDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.find.TransportFindDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.import_index.TransportImportDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.list.TransportListDanglingIndicesAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; @@ -128,7 +122,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.TransportVerifyShardIndexBlockAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; @@ -172,7 +165,6 @@ import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.SimulateBulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; @@ -637,8 +629,8 @@ public void reg actions.register(TransportNodesUsageAction.TYPE, TransportNodesUsageAction.class); actions.register(TransportNodesHotThreadsAction.TYPE, TransportNodesHotThreadsAction.class); actions.register(TransportListTasksAction.TYPE, TransportListTasksAction.class); - actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class); - actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class); + actions.register(TransportGetTaskAction.TYPE, TransportGetTaskAction.class); + actions.register(TransportCancelTasksAction.TYPE, TransportCancelTasksAction.class); actions.register(GetHealthAction.INSTANCE, GetHealthAction.LocalAction.class); actions.register(PrevalidateNodeRemovalAction.INSTANCE, TransportPrevalidateNodeRemovalAction.class); actions.register(HealthApiStatsAction.INSTANCE, HealthApiStatsTransportAction.class); @@ -662,7 +654,7 @@ public void reg actions.register(GetRepositoriesAction.INSTANCE, TransportGetRepositoriesAction.class); actions.register(TransportDeleteRepositoryAction.TYPE, TransportDeleteRepositoryAction.class); 
actions.register(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class); - actions.register(CleanupRepositoryAction.INSTANCE, TransportCleanupRepositoryAction.class); + actions.register(TransportCleanupRepositoryAction.TYPE, TransportCleanupRepositoryAction.class); actions.register(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class); actions.register(TransportDeleteSnapshotAction.TYPE, TransportDeleteSnapshotAction.class); actions.register(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class); @@ -678,7 +670,7 @@ public void reg actions.register(IndicesStatsAction.INSTANCE, TransportIndicesStatsAction.class); actions.register(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class); actions.register(TransportIndicesShardStoresAction.TYPE, TransportIndicesShardStoresAction.class); - actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class); + actions.register(TransportCreateIndexAction.TYPE, TransportCreateIndexAction.class); actions.register(ResizeAction.INSTANCE, TransportResizeAction.class); actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class); actions.register(LazyRolloverAction.INSTANCE, LazyRolloverAction.TransportLazyRolloverAction.class); @@ -686,7 +678,7 @@ public void reg actions.register(GetIndexAction.INSTANCE, TransportGetIndexAction.class); actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class); actions.register(TransportCloseIndexAction.TYPE, TransportCloseIndexAction.class); - actions.register(AddIndexBlockAction.INSTANCE, TransportAddIndexBlockAction.class); + actions.register(TransportAddIndexBlockAction.TYPE, TransportAddIndexBlockAction.class); actions.register(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class); actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class); actions.register(TransportGetFieldMappingsIndexAction.TYPE, TransportGetFieldMappingsIndexAction.class); @@ -711,7 +703,7 @@ public void reg actions.register(RefreshAction.INSTANCE, TransportRefreshAction.class); actions.register(FlushAction.INSTANCE, TransportFlushAction.class); actions.register(ForceMergeAction.INSTANCE, TransportForceMergeAction.class); - actions.register(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class); + actions.register(TransportClearIndicesCacheAction.TYPE, TransportClearIndicesCacheAction.class); actions.register(GetAliasesAction.INSTANCE, TransportGetAliasesAction.class); actions.register(GetSettingsAction.INSTANCE, TransportGetSettingsAction.class); @@ -724,7 +716,7 @@ public void reg actions.register(TransportUpdateAction.TYPE, TransportUpdateAction.class); actions.register(TransportMultiGetAction.TYPE, TransportMultiGetAction.class); actions.register(TransportShardMultiGetAction.TYPE, TransportShardMultiGetAction.class); - actions.register(BulkAction.INSTANCE, TransportBulkAction.class); + actions.register(TransportBulkAction.TYPE, TransportBulkAction.class); actions.register(SimulateBulkAction.INSTANCE, TransportSimulateBulkAction.class); actions.register(TransportShardBulkAction.TYPE, TransportShardBulkAction.class); actions.register(TransportSearchAction.TYPE, TransportSearchAction.class); @@ -740,7 +732,7 @@ public void reg actions.register(AutoCreateAction.INSTANCE, AutoCreateAction.TransportAction.class); actions.register(ResolveIndexAction.INSTANCE, ResolveIndexAction.TransportAction.class); actions.register(TransportResolveClusterAction.TYPE, TransportResolveClusterAction.class); - 
actions.register(AnalyzeIndexDiskUsageAction.INSTANCE, TransportAnalyzeIndexDiskUsageAction.class); + actions.register(TransportAnalyzeIndexDiskUsageAction.TYPE, TransportAnalyzeIndexDiskUsageAction.class); actions.register(FieldUsageStatsAction.INSTANCE, TransportFieldUsageAction.class); actions.register(MasterHistoryAction.INSTANCE, MasterHistoryAction.TransportAction.class); actions.register(CoordinationDiagnosticsAction.INSTANCE, CoordinationDiagnosticsAction.TransportAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java deleted file mode 100644 index 50fea2093da49..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.node.tasks.cancel; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; - -/** - * ActionType for cancelling running tasks - */ -public class CancelTasksAction extends ActionType { - - public static final CancelTasksAction INSTANCE = new CancelTasksAction(); - public static final String NAME = "cluster:admin/tasks/cancel"; - - private CancelTasksAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java index 5fdd50e0c9e66..39c19e225d175 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java @@ -18,7 +18,7 @@ public class CancelTasksRequestBuilder extends TasksRequestBuilder { public CancelTasksRequestBuilder(ElasticsearchClient client) { - super(client, CancelTasksAction.INSTANCE, new CancelTasksRequest()); + super(client, TransportCancelTasksAction.TYPE, new CancelTasksRequest()); } public CancelTasksRequestBuilder waitForCompletion(boolean waitForCompletion) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 1f3271be79797..d2e79bc63daf8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -34,10 +35,14 @@ */ public class 
TransportCancelTasksAction extends TransportTasksAction { + public static final String NAME = "cluster:admin/tasks/cancel"; + + public static final ActionType TYPE = new ActionType<>(NAME); + @Inject public TransportCancelTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { super( - CancelTasksAction.NAME, + NAME, clusterService, transportService, actionFilters, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java deleted file mode 100644 index 21be31462ef0d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.node.tasks.get; - -import org.elasticsearch.action.ActionType; - -/** - * ActionType for retrieving a list of currently running tasks - */ -public class GetTaskAction extends ActionType { - public static final String TASKS_ORIGIN = "tasks"; - - public static final GetTaskAction INSTANCE = new GetTaskAction(); - public static final String NAME = "cluster:monitor/task/get"; - - private GetTaskAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java index 49eacd0996111..6d8d4eceed3f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java @@ -18,7 +18,7 @@ */ public class GetTaskRequestBuilder extends ActionRequestBuilder { public GetTaskRequestBuilder(ElasticsearchClient client) { - super(client, GetTaskAction.INSTANCE, new GetTaskRequest()); + super(client, TransportGetTaskAction.TYPE, new GetTaskRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 9e0b6937257b4..c8b33e6d569d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; @@ -44,7 +45,6 @@ import java.io.IOException; import static java.util.Objects.requireNonNullElse; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.core.TimeValue.timeValueSeconds; /** @@ -59,6 +59,8 @@ 
*/ public class TransportGetTaskAction extends HandledTransportAction { + public static final String TASKS_ORIGIN = "tasks"; + public static final ActionType TYPE = new ActionType<>("cluster:monitor/task/get"); private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30); private final ThreadPool threadPool; @@ -76,7 +78,7 @@ public TransportGetTaskAction( Client client, NamedXContentRegistry xContentRegistry ) { - super(GetTaskAction.NAME, transportService, actionFilters, GetTaskRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(TYPE.name(), transportService, actionFilters, GetTaskRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; @@ -120,7 +122,7 @@ private void runOnNodeWithTaskIfPossible(Task thisTask, GetTaskRequest request, GetTaskRequest nodeRequest = request.nodeRequest(clusterService.localNode().getId(), thisTask.getId()); transportService.sendRequest( node, - GetTaskAction.NAME, + TYPE.name(), nodeRequest, TransportRequestOptions.timeout(request.getTimeout()), new ActionListenerResponseHandler<>(listener, GetTaskResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java deleted file mode 100644 index d71c66fd6f3ca..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.action.admin.cluster.repositories.cleanup; - -import org.elasticsearch.action.ActionType; - -public final class CleanupRepositoryAction extends ActionType { - - public static final CleanupRepositoryAction INSTANCE = new CleanupRepositoryAction(); - public static final String NAME = "cluster:admin/repository/_cleanup"; - - private CleanupRepositoryAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java index 680502c783a8b..b253264f039e4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java @@ -16,7 +16,7 @@ public class CleanupRepositoryRequestBuilder extends MasterNodeOperationRequestB CleanupRepositoryRequestBuilder> { public CleanupRepositoryRequestBuilder(ElasticsearchClient client, String repository) { - super(client, CleanupRepositoryAction.INSTANCE, new CleanupRepositoryRequest(repository)); + super(client, TransportCleanupRepositoryAction.TYPE, new CleanupRepositoryRequest(repository)); } public CleanupRepositoryRequestBuilder setName(String repository) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java index 2c8371b0af4f2..4892efaf5ae1f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -59,6 +60,7 @@ */ public final class TransportCleanupRepositoryAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("cluster:admin/repository/_cleanup"); private static final Logger logger = LogManager.getLogger(TransportCleanupRepositoryAction.class); private final RepositoriesService repositoriesService; @@ -73,7 +75,7 @@ public TransportCleanupRepositoryAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - CleanupRepositoryAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java deleted file mode 100644 index 74184598c6db2..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java
deleted file mode 100644
index 74184598c6db2..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.indices.cache.clear;
-
-import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.support.broadcast.BroadcastResponse;
-
-public class ClearIndicesCacheAction extends ActionType<BroadcastResponse> {
-
-    public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction();
-    public static final String NAME = "indices:admin/cache/clear";
-
-    private ClearIndicesCacheAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java
index fb6139c0ae4e3..43ad9bff9af8d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java
@@ -18,7 +18,7 @@ public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBu
     ClearIndicesCacheRequestBuilder> {
 
     public ClearIndicesCacheRequestBuilder(ElasticsearchClient client) {
-        super(client, ClearIndicesCacheAction.INSTANCE, new ClearIndicesCacheRequest());
+        super(client, TransportClearIndicesCacheAction.TYPE, new ClearIndicesCacheRequest());
     }
 
     public ClearIndicesCacheRequestBuilder setQueryCache(boolean queryCache) {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
index faeaf0bdb575a..428fd6e083116 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.action.admin.indices.cache.clear;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.BroadcastResponse;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
@@ -36,6 +37,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc
     BroadcastResponse,
     TransportBroadcastByNodeAction.EmptyResult> {
 
+    public static final ActionType<BroadcastResponse> TYPE = new ActionType<>("indices:admin/cache/clear");
     private final IndicesService indicesService;
 
     @Inject
@@ -47,7 +49,7 @@ public TransportClearIndicesCacheAction(
         IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
-            ClearIndicesCacheAction.NAME,
+            TYPE.name(),
             clusterService,
             transportService,
             actionFilters,
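For callers the migration is mechanical: every reference to a deleted INSTANCE becomes a reference to the transport class's TYPE constant, whose response generic keeps client.execute type-safe. A hypothetical caller, for illustration only (the client variable, index name, and listener are invented):

    // Hypothetical usage: executing the action through the relocated constant.
    client.execute(
        TransportClearIndicesCacheAction.TYPE,
        new ClearIndicesCacheRequest("my-index"),
        ActionListener.wrap(response -> {}, exception -> {})
    );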
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java
deleted file mode 100644
index 5560c44f3fcbe..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.indices.create;
-
-import org.elasticsearch.action.ActionType;
-
-public class CreateIndexAction extends ActionType<CreateIndexResponse> {
-
-    public static final CreateIndexAction INSTANCE = new CreateIndexAction();
-    public static final String NAME = "indices:admin/create";
-
-    private CreateIndexAction() {
-        super(NAME);
-    }
-
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java
index 307cafbb9b8e1..4e265f4052e72 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java
@@ -29,11 +29,11 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<
     CreateIndexRequestBuilder> {
 
     public CreateIndexRequestBuilder(ElasticsearchClient client) {
-        super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest());
+        super(client, TransportCreateIndexAction.TYPE, new CreateIndexRequest());
     }
 
     public CreateIndexRequestBuilder(ElasticsearchClient client, String index) {
-        super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest(index));
+        super(client, TransportCreateIndexAction.TYPE, new CreateIndexRequest(index));
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java
index c03cba9b40a33..72f4c4676cf1d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -43,6 +44,7 @@
  * Create index action.
  */
 public class TransportCreateIndexAction extends TransportMasterNodeAction<CreateIndexRequest, CreateIndexResponse> {
+    public static final ActionType<CreateIndexResponse> TYPE = new ActionType<>("indices:admin/create");
     private static final Logger logger = LogManager.getLogger(TransportCreateIndexAction.class);
 
     private final MetadataCreateIndexService createIndexService;
@@ -59,7 +61,7 @@ public TransportCreateIndexAction(
         SystemIndices systemIndices
     ) {
         super(
-            CreateIndexAction.NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             threadPool,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java
deleted file mode 100644
index 710bf5077b73d..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.indices.diskusage;
-
-import org.elasticsearch.action.ActionType;
-
-public class AnalyzeIndexDiskUsageAction extends ActionType<AnalyzeIndexDiskUsageResponse> {
-    public static final AnalyzeIndexDiskUsageAction INSTANCE = new AnalyzeIndexDiskUsageAction();
-    public static final String NAME = "indices:admin/analyze_disk_usage";
-
-    public AnalyzeIndexDiskUsageAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java
index 08bc4469deaa1..119bb8d9dec61 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java
@@ -58,7 +58,7 @@ public void setParentTask(String parentTaskNode, long parentTaskId) {
 
     @Override
     public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
-        return new CancellableTask(id, AnalyzeIndexDiskUsageAction.NAME, type, "", parentTaskId, headers) {
+        return new CancellableTask(id, TransportAnalyzeIndexDiskUsageAction.TYPE.name(), type, "", parentTaskId, headers) {
             @Override
             public String getDescription() {
                 return AnalyzeIndexDiskUsageRequest.this.getDescription();
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java
index 69e694447bccd..8380edb4cb6ed 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.NoShardAvailableActionException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.DefaultShardOperationFailedException;
@@ -51,6 +52,7 @@ public class TransportAnalyzeIndexDiskUsageAction extends TransportBroadcastActi
     AnalyzeIndexDiskUsageResponse,
     AnalyzeDiskUsageShardRequest,
     AnalyzeDiskUsageShardResponse> {
+    public static final ActionType<AnalyzeIndexDiskUsageResponse> TYPE = new ActionType<>("indices:admin/analyze_disk_usage");
     private final IndicesService indicesService;
     private final ThreadPool threadPool;
 
@@ -63,7 +65,7 @@ public TransportAnalyzeIndexDiskUsageAction(
         IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
-            AnalyzeIndexDiskUsageAction.NAME,
+            TYPE.name(),
             clusterService,
             transportService,
             actionFilters,
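The disk-usage request also names its cancellable task after the action, so the same constant now feeds task management: tasks listed or cancelled by action string keep matching because the string itself is unchanged. A trimmed sketch of that pattern, assuming the anonymous getDescription() override is dropped for brevity:

    // Sketch: the task registers under the action name read from TYPE, so task
    // APIs that filter on the action string are unaffected by the refactor.
    @Override
    public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
        return new CancellableTask(id, TransportAnalyzeIndexDiskUsageAction.TYPE.name(), type, "", parentTaskId, headers);
    }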
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java
deleted file mode 100644
index 460be3cf10c1c..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.indices.readonly;
-
-import org.elasticsearch.action.ActionType;
-
-public class AddIndexBlockAction extends ActionType<AddIndexBlockResponse> {
-
-    public static final AddIndexBlockAction INSTANCE = new AddIndexBlockAction();
-    public static final String NAME = "indices:admin/block/add";
-
-    private AddIndexBlockAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java
index 5b16e6889ad22..4bcff8d1f6b16 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java
@@ -22,7 +22,7 @@ public class AddIndexBlockRequestBuilder extends AcknowledgedRequestBuilder<
     AddIndexBlockRequestBuilder> {
 
     public AddIndexBlockRequestBuilder(ElasticsearchClient client, APIBlock block, String... indices) {
-        super(client, AddIndexBlockAction.INSTANCE, new AddIndexBlockRequest(block, indices));
+        super(client, TransportAddIndexBlockAction.TYPE, new AddIndexBlockRequest(block, indices));
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java
index 731257ddabbad..ab2549bf9de67 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.DestructiveOperations;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -40,6 +41,7 @@
  */
 public class TransportAddIndexBlockAction extends TransportMasterNodeAction<AddIndexBlockRequest, AddIndexBlockResponse> {
+    public static final ActionType<AddIndexBlockResponse> TYPE = new ActionType<>("indices:admin/block/add");
     private static final Logger logger = LogManager.getLogger(TransportAddIndexBlockAction.class);
 
     private final MetadataIndexStateService indexStateService;
@@ -56,7 +58,7 @@ public TransportAddIndexBlockAction(
         DestructiveOperations destructiveOperations
     ) {
         super(
-            AddIndexBlockAction.NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             threadPool,
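One wrinkle, visible in the very next hunk: some action names are derived rather than literal. Shard-level sub-actions append "[s]" to their parent's name, and after the refactor they derive it from the parent's TYPE constant instead of the deleted NAME field. The shape, sketched:

    // Sketch of a derived name: the verify-shard sub-action reuses the parent
    // action's registered name plus the conventional "[s]" shard suffix.
    public static final ActionType<ReplicationResponse> TYPE =
        new ActionType<>(TransportAddIndexBlockAction.TYPE.name() + "[s]");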
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java
index ac590d1a4d826..31e9f959f0fe7 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java
@@ -44,8 +44,7 @@ public class TransportVerifyShardIndexBlockAction extends TransportReplicationAc
     TransportVerifyShardIndexBlockAction.ShardRequest,
     ReplicationResponse> {
 
-    public static final String NAME = AddIndexBlockAction.NAME + "[s]";
-    public static final ActionType<ReplicationResponse> TYPE = new ActionType<>(NAME);
+    public static final ActionType<ReplicationResponse> TYPE = new ActionType<>(TransportAddIndexBlockAction.TYPE.name() + "[s]");
 
     @Inject
     public TransportVerifyShardIndexBlockAction(
@@ -59,7 +58,7 @@ public TransportVerifyShardIndexBlockAction(
     ) {
         super(
             settings,
-            NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             indicesService,
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java
deleted file mode 100644
index bc72e039e6ded..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.bulk;
-
-import org.elasticsearch.action.ActionType;
-
-public class BulkAction extends ActionType<BulkResponse> {
-
-    public static final BulkAction INSTANCE = new BulkAction();
-    public static final String NAME = "indices:data/write/bulk";
-
-    private BulkAction() {
-        super(NAME);
-    }
-
-}
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java
index 2e2938b63334e..6a90c46fc7fab 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java
@@ -57,7 +57,7 @@ public class BulkRequestBuilder extends ActionRequestLazyBuilder {
+    public static final String NAME = "indices:data/write/bulk";
+    public static final ActionType<BulkResponse> TYPE = new ActionType<>(NAME);
     private static final Logger logger = LogManager.getLogger(TransportBulkAction.class);
 
     public static final String LAZY_ROLLOVER_ORIGIN = "lazy_rollover";
@@ -141,7 +143,7 @@ public TransportBulkAction(
         LongSupplier relativeTimeProvider
     ) {
         this(
-            BulkAction.INSTANCE,
+            TYPE,
             BulkRequest::new,
             threadPool,
             transportService,
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index 4cf10b3c27824..265719b4738c0 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -78,7 +78,7 @@
 /** Performs shard-level bulk (index, delete or update) operations */
 public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
 
-    public static final String ACTION_NAME = BulkAction.NAME + "[s]";
+    public static final String ACTION_NAME = TransportBulkAction.NAME + "[s]";
     public static final ActionType<BulkShardResponse> TYPE = new ActionType<>(ACTION_NAME);
     private static final Logger logger = LogManager.getLogger(TransportShardBulkAction.class);
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
index d0ae8d1ccb3f1..66c395cf51d96 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
@@ -13,7 +13,7 @@
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
-import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction;
 import org.elasticsearch.action.support.ChannelActionListener;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.internal.OriginSettingClient;
@@ -656,6 +656,6 @@ public void cancelSearchTask(SearchTask task, String reason) {
         CancelTasksRequest req = new CancelTasksRequest().setTargetTaskId(new TaskId(client.getLocalNodeId(), task.getId()))
             .setReason("Fatal failure during search: " + reason);
         // force the origin to execute the cancellation as a system user
-        new OriginSettingClient(client, GetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.noop());
+        new OriginSettingClient(client, TransportGetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.noop());
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
index c6d9c3a8f3563..76073696b0b27 100644
--- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
+++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
@@ -32,13 +32,13 @@
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
 import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
-import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
-import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder;
 import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
@@ -46,10 +46,10 @@
 import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest;
 import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse;
 import org.elasticsearch.action.admin.cluster.node.usage.TransportNodesUsageAction;
-import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction;
 import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequestBuilder;
 import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder;
 import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction;
@@ -125,17 +125,17 @@
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
-import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
+import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
 import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
@@ -164,10 +164,10 @@
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
-import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction;
 import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest;
 import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequestBuilder;
 import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse;
+import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction;
 import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
 import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
 import org.elasticsearch.action.admin.indices.recovery.RecoveryRequestBuilder;
@@ -212,10 +212,10 @@
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.delete.DeleteRequestBuilder;
 import org.elasticsearch.action.delete.DeleteResponse;
@@ -428,12 +428,12 @@ public DeleteRequestBuilder prepareDelete(String index, String id) {
 
     @Override
     public ActionFuture<BulkResponse> bulk(final BulkRequest request) {
-        return execute(BulkAction.INSTANCE, request);
+        return execute(TransportBulkAction.TYPE, request);
     }
 
     @Override
     public void bulk(final BulkRequest request, final ActionListener<BulkResponse> listener) {
-        execute(BulkAction.INSTANCE, request, listener);
+        execute(TransportBulkAction.TYPE, request, listener);
     }
 
     @Override
@@ -782,12 +782,12 @@ public ListTasksRequestBuilder prepareListTasks(String... nodesIds) {
 
     @Override
     public ActionFuture<GetTaskResponse> getTask(final GetTaskRequest request) {
-        return execute(GetTaskAction.INSTANCE, request);
+        return execute(TransportGetTaskAction.TYPE, request);
     }
 
     @Override
     public void getTask(final GetTaskRequest request, final ActionListener<GetTaskResponse> listener) {
-        execute(GetTaskAction.INSTANCE, request, listener);
+        execute(TransportGetTaskAction.TYPE, request, listener);
     }
 
     @Override
@@ -802,12 +802,12 @@ public GetTaskRequestBuilder prepareGetTask(TaskId taskId) {
 
     @Override
     public ActionFuture<ListTasksResponse> cancelTasks(CancelTasksRequest request) {
-        return execute(CancelTasksAction.INSTANCE, request);
+        return execute(TransportCancelTasksAction.TYPE, request);
     }
 
     @Override
     public void cancelTasks(CancelTasksRequest request, ActionListener<ListTasksResponse> listener) {
-        execute(CancelTasksAction.INSTANCE, request, listener);
+        execute(TransportCancelTasksAction.TYPE, request, listener);
     }
 
     @Override
@@ -917,7 +917,7 @@ public CleanupRepositoryRequestBuilder prepareCleanupRepository(String repositor
 
     @Override
     public void cleanupRepository(CleanupRepositoryRequest request, ActionListener<CleanupRepositoryResponse> listener) {
-        execute(CleanupRepositoryAction.INSTANCE, request, listener);
+        execute(TransportCleanupRepositoryAction.TYPE, request, listener);
     }
 
     @Override
@@ -1114,7 +1114,7 @@ public GetAliasesRequestBuilder prepareGetAliases(String... aliases) {
 
     @Override
     public ActionFuture<BroadcastResponse> clearCache(final ClearIndicesCacheRequest request) {
-        return execute(ClearIndicesCacheAction.INSTANCE, request);
+        return execute(TransportClearIndicesCacheAction.TYPE, request);
     }
 
     @Override
@@ -1134,7 +1134,7 @@ public GetIndexRequestBuilder prepareGetIndex() {
 
     @Override
     public void clearCache(final ClearIndicesCacheRequest request, final ActionListener<BroadcastResponse> listener) {
-        execute(ClearIndicesCacheAction.INSTANCE, request, listener);
+        execute(TransportClearIndicesCacheAction.TYPE, request, listener);
     }
 
     @Override
@@ -1144,12 +1144,12 @@ public ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) {
 
     @Override
     public ActionFuture<CreateIndexResponse> create(final CreateIndexRequest request) {
-        return execute(CreateIndexAction.INSTANCE, request);
+        return execute(TransportCreateIndexAction.TYPE, request);
     }
 
     @Override
     public void create(final CreateIndexRequest request, final ActionListener<CreateIndexResponse> listener) {
-        execute(CreateIndexAction.INSTANCE, request, listener);
+        execute(TransportCreateIndexAction.TYPE, request, listener);
    }
 
     @Override
@@ -1204,7 +1204,7 @@ public AddIndexBlockRequestBuilder prepareAddBlock(APIBlock block, String... 
ind @Override public void addBlock(AddIndexBlockRequest request, ActionListener listener) { - execute(AddIndexBlockAction.INSTANCE, request, listener); + execute(TransportAddIndexBlockAction.TYPE, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java b/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java index 3c1aa54bc7056..6406b7b2b93c3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java @@ -30,7 +30,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; /** * A {@linkplain Client} that cancels tasks executed locally when the provided {@link HttpChannel} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java index c139675a1ded3..e1732f267a814 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; +import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; @@ -54,7 +54,7 @@ public BaseRestHandler.RestChannelConsumer prepareRequest(final RestRequest requ final AnalyzeIndexDiskUsageRequest analyzeRequest = new AnalyzeIndexDiskUsageRequest(indices, indicesOptions, flush); return channel -> { final RestCancellableNodeClient cancelClient = new RestCancellableNodeClient(client, request.getHttpChannel()); - cancelClient.execute(AnalyzeIndexDiskUsageAction.INSTANCE, analyzeRequest, new RestToXContentListener<>(channel)); + cancelClient.execute(TransportAnalyzeIndexDiskUsageAction.TYPE, analyzeRequest, new RestToXContentListener<>(channel)); }; } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index a72ef9e83ccf2..8632cd6cfea77 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -35,7 +35,7 @@ import java.util.Iterator; import java.util.Map; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.tasks.TaskInfo.INCLUDE_CANCELLED_PARAM; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index adefd71f93590..22953f9959c1d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -555,7 +555,7 @@ public void testNonExistingTaskCancellation() throws Exception { // Make sure that main task is no longer running ListTasksResponse listTasksResponse = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction, - new ListTasksRequest().setActions(CancelTasksAction.NAME + "*") + new ListTasksRequest().setActions(TransportCancelTasksAction.NAME + "*") ); assertEquals(0, listTasksResponse.getTasks().size()); }); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index d714105d9a13a..8b8d4e52d33d4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -61,7 +61,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.test.ESTestCase.waitUntil; /** diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 6815d634292a4..b97e8303a8eb5 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -442,7 +442,7 @@ public void testIngestForward() throws Exception { verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); boolean usedNode1 = node.getValue() == remoteNode1; // make sure we used one of the nodes if (usedNode1 == false) { assertSame(remoteNode2, node.getValue()); @@ -457,7 +457,7 @@ public void testIngestForward() throws Exception { // now make sure ingest nodes are rotated through with a subsequent request reset(transportService); ActionTestUtils.execute(action, null, bulkRequest, 
listener); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); if (usedNode1) { assertSame(remoteNode2, node.getValue()); } else { @@ -482,7 +482,7 @@ public void testSingleItemBulkActionIngestForward() throws Exception { verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); boolean usedNode1 = node.getValue() == remoteNode1; // make sure we used one of the nodes if (usedNode1 == false) { assertSame(remoteNode2, node.getValue()); @@ -500,7 +500,7 @@ public void testSingleItemBulkActionIngestForward() throws Exception { // now make sure ingest nodes are rotated through with a subsequent request reset(transportService); ActionTestUtils.execute(singleItemBulkWriteAction, null, indexRequest, listener); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); if (usedNode1) { assertSame(remoteNode2, node.getValue()); } else { diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java index 97c52ef2edc37..0a490898b7fa7 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java @@ -15,8 +15,8 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -60,9 +60,9 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { ClusterRerouteAction.INSTANCE, // indices admin actions - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, IndicesStatsAction.INSTANCE, - ClearIndicesCacheAction.INSTANCE, + TransportClearIndicesCacheAction.TYPE, FlushAction.INSTANCE }; protected ThreadPool threadPool; @@ -118,12 +118,15 @@ public void testActions() { client.admin().cluster().prepareReroute().execute(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool())); // choosing arbitrary indices admin actions to test - client.admin().indices().prepareCreate("idx").execute(new 
AssertingActionListener<>(CreateIndexAction.NAME, client.threadPool())); + client.admin() + .indices() + .prepareCreate("idx") + .execute(new AssertingActionListener<>(TransportCreateIndexAction.TYPE.name(), client.threadPool())); client.admin().indices().prepareStats().execute(new AssertingActionListener<>(IndicesStatsAction.NAME, client.threadPool())); client.admin() .indices() .prepareClearCache("idx1", "idx2") - .execute(new AssertingActionListener<>(ClearIndicesCacheAction.NAME, client.threadPool())); + .execute(new AssertingActionListener<>(TransportClearIndicesCacheAction.TYPE.name(), client.threadPool())); client.admin().indices().prepareFlush().execute(new AssertingActionListener<>(FlushAction.NAME, client.threadPool())); } @@ -144,7 +147,7 @@ public void testOverrideHeader() throws Exception { client.admin() .indices() .prepareCreate("idx") - .execute(new AssertingActionListener<>(CreateIndexAction.NAME, expected, client.threadPool())); + .execute(new AssertingActionListener<>(TransportCreateIndexAction.TYPE.name(), expected, client.threadPool())); } protected static void assertHeaders(Map headers, Map expected) { diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java index 3f425ac202b6c..a1ad29ef2ddd6 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; @@ -165,7 +165,7 @@ public Task exe ActionListener listener ) { switch (action.name()) { - case CancelTasksAction.NAME -> { + case TransportCancelTasksAction.NAME -> { CancelTasksRequest cancelTasksRequest = (CancelTasksRequest) request; assertTrue("tried to cancel the same task more than once", cancelledTasks.add(cancelTasksRequest.getTargetTaskId())); Task task = request.createTask(counter.getAndIncrement(), "cancel_task", action.name(), null, Collections.emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 0a53db94b9aaf..dafe994b502f0 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RequestValidators; -import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; @@ -39,7 
+38,6 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; @@ -48,7 +46,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportBulkAction; @@ -2324,7 +2321,7 @@ protected void assertSnapshotOrGenericThread() { new IndexSettingProviders(Set.of()) ); actions.put( - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, new TransportCreateIndexAction( transportService, clusterService, @@ -2339,7 +2336,7 @@ protected void assertSnapshotOrGenericThread() { final IndexingPressure indexingMemoryLimits = new IndexingPressure(settings); mappingUpdatedAction.setClient(client); actions.put( - BulkAction.INSTANCE, + TransportBulkAction.TYPE, new TransportBulkAction( threadPool, transportService, @@ -2477,7 +2474,7 @@ protected void assertSnapshotOrGenericThread() { ) ); actions.put( - CleanupRepositoryAction.INSTANCE, + TransportCleanupRepositoryAction.TYPE, new TransportCleanupRepositoryAction( transportService, clusterService, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java index e5bffdd5f4c11..208b6bb1b4fd0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java @@ -8,8 +8,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -38,7 +38,7 @@ public void performAction( getClient().admin() .indices() .execute( - AddIndexBlockAction.INSTANCE, + TransportAddIndexBlockAction.TYPE, new AddIndexBlockRequest(WRITE, indexMetadata.getIndex().getName()).masterNodeTimeout(TimeValue.MAX_VALUE), listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index f87123d48ccea..47e4a6913897b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; @@ -311,7 +311,10 @@ public class ClusterPrivilegeResolver { Set.of("cluster:admin/fleet/secrets/post", "cluster:admin/fleet/secrets/delete") ); - public static final NamedClusterPrivilege CANCEL_TASK = new ActionClusterPrivilege("cancel_task", Set.of(CancelTasksAction.NAME + "*")); + public static final NamedClusterPrivilege CANCEL_TASK = new ActionClusterPrivilege( + "cancel_task", + Set.of(TransportCancelTasksAction.NAME + "*") + ); public static final NamedClusterPrivilege MANAGE_SEARCH_APPLICATION = new ActionClusterPrivilege( "manage_search_application", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index ba00864148c24..f373453f779d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; @@ -119,7 +119,7 @@ public final class IndexPrivilege extends Privilege { ) ); private static final Automaton CREATE_INDEX_AUTOMATON = patterns( - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), AutoCreateAction.NAME, CreateDataStreamAction.NAME ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 43863d1b203d1..23431e184422a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import 
org.elasticsearch.action.admin.indices.rollover.RolloverAction; @@ -149,7 +149,7 @@ public class InternalUsers { IndicesStatsAction.NAME + "*", TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ) .allowRestrictedIndices(false) .build(), @@ -168,7 +168,7 @@ public class InternalUsers { IndicesStatsAction.NAME + "*", TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ) .allowRestrictedIndices(true) .build() }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index 3879a8df0fbe6..1f28afbbc75b7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -188,7 +188,7 @@ public void testAuditingBeforeTemplateInstalled() throws Exception { // the back log will be written some point later ArgumentCaptor bulkCaptor = ArgumentCaptor.forClass(BulkRequest.class); - assertBusy(() -> verify(client, times(1)).execute(eq(BulkAction.INSTANCE), bulkCaptor.capture(), any())); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), bulkCaptor.capture(), any())); BulkRequest bulkRequest = bulkCaptor.getValue(); assertThat(bulkRequest.numberOfActions(), equalTo(3)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index 46c393d9f0de2..91cf339e46018 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -310,7 +310,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = fromRole.authorize( - CreateIndexAction.NAME, + 
+            TransportCreateIndexAction.TYPE.name(),
             Sets.newHashSet("_index", "_index1"),
             md.getIndicesLookup(),
             fieldPermissionsCache
@@ -350,7 +350,7 @@ public void testAuthorize() {
         assertThat(iac.getIndexPermissions("_index1"), is(nullValue()));
         assertThat(iac.hasIndexPermissions("_index1"), is(false));
         iac = limitedByRole.authorize(
-            CreateIndexAction.NAME,
+            TransportCreateIndexAction.TYPE.name(),
             Sets.newHashSet("_index", "_alias1"),
             md.getIndicesLookup(),
             fieldPermissionsCache
@@ -390,7 +390,7 @@ public void testAuthorize() {
         assertThat(iac.getIndexPermissions("_index1"), is(nullValue()));
         assertThat(iac.hasIndexPermissions("_index1"), is(false));
         iac = role.authorize(
-            CreateIndexAction.NAME,
+            TransportCreateIndexAction.TYPE.name(),
             Sets.newHashSet("_index", "_index1"),
             md.getIndicesLookup(),
             fieldPermissionsCache
@@ -446,12 +446,12 @@ public void testCheckClusterAction() {
     public void testCheckIndicesAction() {
         Role fromRole = Role.builder(EMPTY_RESTRICTED_INDICES, "a-role").add(IndexPrivilege.READ, "ind-1").build();
         assertThat(fromRole.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true));
-        assertThat(fromRole.checkIndicesAction(CreateIndexAction.NAME), is(false));
+        assertThat(fromRole.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false));

         {
             Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limited-role").add(IndexPrivilege.ALL, "ind-1").build();
             assertThat(limitedByRole.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true));
-            assertThat(limitedByRole.checkIndicesAction(CreateIndexAction.NAME), is(true));
+            assertThat(limitedByRole.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(true));
             Role role;
             if (randomBoolean()) {
                 role = limitedByRole.limitedBy(fromRole);
@@ -459,7 +459,7 @@ public void testCheckIndicesAction() {
                 role = fromRole.limitedBy(limitedByRole);
             }
             assertThat(role.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true));
-            assertThat(role.checkIndicesAction(CreateIndexAction.NAME), is(false));
+            assertThat(role.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false));
         }
         {
             Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limited-role").add(IndexPrivilege.NONE, "ind-1").build();
@@ -471,7 +471,7 @@ public void testCheckIndicesAction() {
                 role = fromRole.limitedBy(limitedByRole);
             }
             assertThat(role.checkIndicesAction(TransportSearchAction.TYPE.name()), is(false));
-            assertThat(role.checkIndicesAction(CreateIndexAction.NAME), is(false));
+            assertThat(role.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false));
         }
     }
@@ -564,13 +564,13 @@ public void testAllowedActionsMatcher() {
         Automaton fromRoleAutomaton = fromRole.allowedActionsMatcher("index1");
         Predicate<String> fromRolePredicate = Automatons.predicate(fromRoleAutomaton);
         assertThat(fromRolePredicate.test(TransportSearchAction.TYPE.name()), is(true));
-        assertThat(fromRolePredicate.test(BulkAction.NAME), is(true));
+        assertThat(fromRolePredicate.test(TransportBulkAction.NAME), is(true));

         Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limitedRole").add(IndexPrivilege.READ, "index1", "index2").build();
         Automaton limitedByRoleAutomaton = limitedByRole.allowedActionsMatcher("index1");
         Predicate<String> limitedByRolePredicated = Automatons.predicate(limitedByRoleAutomaton);
         assertThat(limitedByRolePredicated.test(TransportSearchAction.TYPE.name()), is(true));
-        assertThat(limitedByRolePredicated.test(BulkAction.NAME), is(false));
+        assertThat(limitedByRolePredicated.test(TransportBulkAction.NAME), is(false));

         Role role;
         if (randomBoolean()) {
             role = limitedByRole.limitedBy(fromRole);
@@ -581,17 +581,17 @@ public void testAllowedActionsMatcher() {
         Automaton roleAutomaton = role.allowedActionsMatcher("index1");
         Predicate<String> rolePredicate = Automatons.predicate(roleAutomaton);
         assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(true));
-        assertThat(rolePredicate.test(BulkAction.NAME), is(false));
+        assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false));

         roleAutomaton = role.allowedActionsMatcher("index2");
         rolePredicate = Automatons.predicate(roleAutomaton);
         assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(true));
-        assertThat(rolePredicate.test(BulkAction.NAME), is(false));
+        assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false));

         roleAutomaton = role.allowedActionsMatcher("other");
         rolePredicate = Automatons.predicate(roleAutomaton);
         assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(false));
-        assertThat(rolePredicate.test(BulkAction.NAME), is(false));
+        assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false));
     }

     public void testCheckClusterPrivilege() {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java
index 21827c4b9a373..aa9bb1dd579bf 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java
@@ -8,7 +8,7 @@
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
-import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
@@ -531,8 +531,8 @@ public void testIngestPipelinePrivileges() {
     }

     public void testCancelTasksPrivilege() {
-        verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, CancelTasksAction.NAME);
-        verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, CancelTasksAction.NAME + "[n]");
+        verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, TransportCancelTasksAction.NAME);
+        verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, TransportCancelTasksAction.NAME + "[n]");
         verifyClusterActionDenied(ClusterPrivilegeResolver.CANCEL_TASK, "cluster:admin/whatever");
     }
 }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index 4ff250c3a68b3..b0d25949947e3 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
 import org.elasticsearch.action.admin.indices.create.AutoCreateAction;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction;
@@ -40,7 +40,7 @@
 import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction;
 import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction;
 import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
-import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.datastreams.CreateDataStreamAction;
 import org.elasticsearch.action.datastreams.DeleteDataStreamAction;
 import org.elasticsearch.action.datastreams.GetDataStreamAction;
@@ -622,7 +622,10 @@ public void testKibanaSystemRole() {
             kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -656,7 +659,10 @@ public void testKibanaSystemRole() {
             kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -690,7 +696,10 @@ public void testKibanaSystemRole() {
             kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(
@@ -721,7 +730,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -753,7 +765,10 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -776,7 +791,10 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -820,7 +838,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -845,7 +866,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -874,7 +898,7 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(dotFleetSecretsIndex), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(dotFleetSecretsIndex), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(dotFleetSecretsIndex), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetSecretsIndex), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetSecretsIndex), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetSecretsIndex), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetSecretsIndex), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(false));
@@ -897,7 +921,10 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -924,7 +951,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -948,7 +978,7 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -973,7 +1003,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -998,7 +1031,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1025,7 +1061,10 @@ public void testKibanaSystemRole() {
             kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(
@@ -1079,7 +1118,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1104,7 +1146,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1127,7 +1172,10 @@ public void testKibanaSystemRole() {
             kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(
@@ -1205,7 +1253,10 @@ public void testKibanaSystemRole() {
         final boolean isAlsoAutoCreateIndex = indexName.startsWith(".logs-endpoint.actions-")
             || indexName.startsWith(".logs-endpoint.action.responses-");

-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(isAlsoAutoCreateIndex));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(false));
         assertThat(
@@ -1289,9 +1340,9 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true));
         // Allow create and delete index, modifying aliases, and updating index settings
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1330,9 +1381,9 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true));
         // Allow create and delete index, modifying aliases, and updating index settings
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true));
@@ -1364,7 +1415,10 @@ public void testKibanaSystemRole() {
         // Allow read-only
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(
@@ -1397,9 +1451,9 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true));
         // Allow create and delete index
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true));

@@ -1421,7 +1475,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1445,7 +1502,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1475,9 +1535,9 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true));
         // Allow create and delete index, modifying aliases, and updating index settings
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true));
@@ -1511,7 +1571,10 @@ public void testKibanaSystemRole() {
             is(false)
         );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false));
+        assertThat(
+            kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+            is(false)
+        );
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
@@ -1562,9 +1625,9 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true));
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true));
         // Allow create and delete index, modifying aliases, and updating index settings
-        assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+        assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true));
         assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true));
@@ -1586,7 +1649,7 @@ public void testKibanaSystemRole() {

         Arrays.asList(".asset-criticality.asset-criticality-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach(indexName -> {
             final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName);
-            assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
             assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
             assertViewIndexMetadata(kibanaRole, indexName);
         });
@@ -1807,7 +1870,10 @@ public void testMonitoringUserRole() {
             monitoringUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(monitoringUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            monitoringUserRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(
             monitoringUserRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)),
             is(false)
@@ -1962,7 +2028,9 @@ public void testRemoteMonitoringAgentRole() {
             is(true)
         );
         assertThat(
-            remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(monitoringIndex)),
+            remoteMonitoringAgentRole.indices()
+                .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name())
+                .test(mockIndexAbstraction(monitoringIndex)),
             is(true)
         );
         assertThat(
@@ -2016,7 +2084,9 @@ public void testRemoteMonitoringAgentRole() {
             is(false)
         );
         assertThat(
-            remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(metricbeatIndex)),
+            remoteMonitoringAgentRole.indices()
+                .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name())
+                .test(mockIndexAbstraction(metricbeatIndex)),
             is(true)
         );
         assertThat(
@@ -2171,7 +2241,9 @@ public void testRemoteMonitoringCollectorRole() {
             is(false)
         );
         assertThat(
-            remoteMonitoringCollectorRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)),
+            remoteMonitoringCollectorRole.indices()
+                .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name())
+                .test(mockIndexAbstraction(index)),
             is(false)
         );
         assertThat(
@@ -2424,7 +2496,10 @@ public void testReportingUserRole() {
             reportingUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(reportingUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            reportingUserRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(
             reportingUserRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
@@ -2449,7 +2524,10 @@ public void testReportingUserRole() {
             reportingUserRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            reportingUserRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)),
+            is(false)
+        );

         assertNoAccessAllowed(reportingUserRole, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES);
         assertNoAccessAllowed(reportingUserRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2));
@@ -2667,7 +2745,10 @@ public void testBeatsAdminRole() {
             beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(true)
         );
-        assertThat(beatsAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            beatsAdminRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(
@@ -2727,13 +2808,16 @@ public void testBeatsSystemRole() {
             beatsSystemRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(randomAlphaOfLengthBetween(8, 24))),
             is(false)
         );
-        assertThat(beatsSystemRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            beatsSystemRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(beatsSystemRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(
             beatsSystemRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(beatsSystemRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(beatsSystemRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true));

         assertNoAccessAllowed(beatsSystemRole, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES);
         assertNoAccessAllowed(beatsSystemRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2));
@@ -2772,13 +2856,16 @@ public void testAPMSystemRole() {
         final String index = ".monitoring-beats-" + randomIntBetween(10, 15);
         logger.info("APM beats monitoring index name [{}]", index);

-        assertThat(APMSystemRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            APMSystemRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(
             APMSystemRole.indices().allowedIndicesMatcher("indices:data/write/index:op_type/create").test(mockIndexAbstraction(index)),
             is(true)
         );
         assertThat(APMSystemRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
-        assertThat(APMSystemRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(APMSystemRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true));

         assertThat(
             APMSystemRole.indices().allowedIndicesMatcher("indices:data/write/index:op_type/index").test(mockIndexAbstraction(index)),
@@ -3599,7 +3686,10 @@ private void assertAllIndicesAccessAllowed(Role role, String index) {
             is(true)
         );
         assertThat(role.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(
@@ -3626,7 +3716,7 @@ private void assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(Role role
         assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher("indices:admin/refresh*").test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher("indices:admin/flush*").test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher("indices:admin/synced_flush").test(mockIndexAbstraction(index)), is(true));
@@ -3643,7 +3733,7 @@ private void assertReadWriteDocsButNotDeleteIndexAllowed(Role role, String index
         assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(true));
         assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true));
-        assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true));
     }

     private void assertOnlyReadAllowed(Role role, String index) {
@@ -3651,7 +3741,10 @@ private void assertOnlyReadAllowed(Role role, String index) {
             role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(
             role.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
@@ -3661,7 +3754,7 @@ private void assertOnlyReadAllowed(Role role, String index) {
         assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
-        assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(false));

         assertNoAccessAllowed(role, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES);
         assertNoAccessAllowed(role, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2));
@@ -3696,7 +3789,10 @@ private void assertNoAccessAllowed(Role role, String index) {
             role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
         );
-        assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(
+            role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(false)
+        );
         assertThat(
             role.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(false)
@@ -3706,7 +3802,7 @@ private void assertNoAccessAllowed(Role role, String index) {
         assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(false));
         assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false));
-        assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false));
+        assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(false));
     }

     public void testLogstashAdminRole() {
@@ -3761,7 +3857,10 @@ public void testLogstashAdminRole() {
             logstashAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
             is(true)
         );
-        assertThat(logstashAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true));
+        assertThat(
+            logstashAdminRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)),
+            is(true)
+        );
         assertThat(
             logstashAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)),
             is(true)
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java
index 31642cbf5e34f..3878977df9359 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java
@@ -10,22 +10,22 @@
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.apache.lucene.util.automaton.Operations;
-import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
-import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
+import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
 import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
 import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
 import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction;
-import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction;
+import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.TransportUnpromotableShardRefreshAction;
 import org.elasticsearch.action.admin.indices.rollover.RolloverAction;
 import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
 import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction;
-import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.downsample.DownsampleAction;
 import org.elasticsearch.action.get.TransportGetAction;
 import org.elasticsearch.cluster.metadata.DataStream;
@@ -82,15 +82,15 @@ public void testXPackUser() {
             PutComponentTemplateAction.NAME,
             TransportDeleteStoredScriptAction.TYPE.name(),
             UpdateJobAction.NAME,
-            CleanupRepositoryAction.NAME
+            TransportCleanupRepositoryAction.TYPE.name()
         );
         checkClusterAccess(InternalUsers.XPACK_USER, role, randomFrom(sampleClusterActions), true);

         final List<String> sampleIndexActions = List.of(
             TransportGetAction.TYPE.name(),
-            BulkAction.NAME,
+            TransportBulkAction.NAME,
             RefreshAction.NAME,
-            CreateIndexAction.NAME,
+            TransportCreateIndexAction.TYPE.name(),
             TransportPutMappingAction.TYPE.name(),
             TransportDeleteIndexAction.TYPE.name()
         );
@@ -117,15 +117,15 @@ public void testXPackSecurityUser() {
             PutComponentTemplateAction.NAME,
             TransportDeleteStoredScriptAction.TYPE.name(),
             UpdateJobAction.NAME,
UpdateJobAction.NAME, - CleanupRepositoryAction.NAME + TransportCleanupRepositoryAction.TYPE.name() ); checkClusterAccess(InternalUsers.XPACK_SECURITY_USER, role, randomFrom(sampleClusterActions), true); final List sampleIndexActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -150,9 +150,9 @@ public void testSecurityProfileUser() { final List sampleAllowedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -176,14 +176,14 @@ public void testAsyncSearchUser() { assertThat(role.application(), is(ApplicationPermission.NONE)); assertThat(role.remoteIndices(), is(RemoteIndicesPermission.NONE)); - checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, CancelTasksAction.NAME, true); + checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, TransportCancelTasksAction.NAME, true); checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, ClusterStateAction.NAME, false); final List sampleAllowedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -214,7 +214,7 @@ public void testStorageUser() { final List sampleDeniedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -251,7 +251,7 @@ public void testDataStreamLifecycleUser() { IndicesStatsAction.NAME, TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ); final List sampleSystemDataStreamActions = List.of( @@ -261,7 +261,7 @@ public void testDataStreamLifecycleUser() { IndicesStatsAction.NAME, TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ); final String dataStream = randomAlphaOfLengthBetween(3, 12); checkIndexAccess(role, randomFrom(sampleIndexActions), dataStream, true); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java index ac5f7f2baf43e..4081d7108b0e7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java @@ -6,9 +6,9 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -301,7 +301,7 @@ public void testAsyncTaskExecute() throws Exception { 
         assertThat(executeResponse.getTaskId(), is(not(nullValue())));
         GetTaskRequest getPolicyTaskRequest = new GetTaskRequest().setTaskId(executeResponse.getTaskId()).setWaitForCompletion(true);
         assertBusy(() -> {
-            GetTaskResponse taskResponse = client().execute(GetTaskAction.INSTANCE, getPolicyTaskRequest).actionGet();
+            GetTaskResponse taskResponse = client().execute(TransportGetTaskAction.TYPE, getPolicyTaskRequest).actionGet();
             assertThat(
                 ((ExecuteEnrichPolicyStatus) taskResponse.getTask().getTask().status()).getPhase(),
                 is(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE)
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java
index 457da7f65294b..9f0b18679666b 100644
--- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java
+++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java
@@ -15,7 +15,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.LatchedActionListener;
-import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
+import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterName;
@@ -229,7 +229,7 @@ protected void
                 Thread.currentThread().interrupt();
             }

-            if (GetTaskAction.INSTANCE.equals(action)) {
+            if (TransportGetTaskAction.TYPE.equals(action)) {
                 if (shouldGetTaskApiReturnTimeout.get() == false) {
                     // This is the second call to the Get Task API, so count down the latch to let the main test logic know.
secondGetTaskWasCalled.countDown(); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 13e1df133f00b..8ce1e7f350ccb 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; @@ -2073,7 +2073,7 @@ public void testRunnerCancel() throws Exception { ActionType randomActionType = randomFrom( EnrichReindexAction.INSTANCE, GetIndexAction.INSTANCE, - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, ForceMergeAction.INSTANCE, RefreshAction.INSTANCE, IndicesSegmentsAction.INSTANCE, diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 8d7cbc5cd41be..bc4708cc19c1f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; @@ -172,7 +172,7 @@ public void testCancel() throws Exception { rootTasks.addAll(tasks); }); var cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTasks.get(0).taskId()).setReason("proxy timeout"); - client().execute(CancelTasksAction.INSTANCE, cancelRequest); + client().execute(TransportCancelTasksAction.TYPE, cancelRequest); assertBusy(() -> { List drivers = client(REMOTE_CLUSTER).admin() .cluster() diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 23fa3f862a3ff..92987db865ac7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -10,8 +10,8 @@ import org.apache.lucene.search.DocIdSetIterator; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.collect.Iterators; @@ -231,12 +231,12 @@ private void cancelTask(TaskId taskId) { CancelTasksRequest request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(false); LOGGER.debug("--> cancelling task [{}] without waiting for completion", taskId); - client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); + client().admin().cluster().execute(TransportCancelTasksAction.TYPE, request).actionGet(); scriptPermits.release(numberOfDocs()); request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(true); LOGGER.debug("--> cancelling task [{}] with waiting for completion", taskId); - client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); + client().admin().cluster().execute(TransportCancelTasksAction.TYPE, request).actionGet(); } /** diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index 8675c27325b4b..cbdda089e8328 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -13,13 +13,13 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; @@ -137,7 +137,7 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { calledTimes.incrementAndGet(); - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequest.requests().forEach(dwr -> assertEquals(ILM_HISTORY_DATA_STREAM, dwr.index())); @@ -177,11 +177,11 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { - if (action instanceof CreateIndexAction && request instanceof CreateIndexRequest) { + if (action == TransportCreateIndexAction.TYPE && request instanceof CreateIndexRequest) { return 
new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); } calledTimes.incrementAndGet(); - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequest.requests().forEach(dwr -> { @@ -230,7 +230,7 @@ public void testMultipleFlushes() throws Exception { long numberOfDocs = 400_000; CountDownLatch latch = new CountDownLatch((int) numberOfDocs); client.setVerifier((action, request, listener) -> { - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; List> realRequests = bulkRequest.requests(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index f28f6eff25b04..57aba2bb80d68 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchRequest; @@ -255,7 +255,7 @@ private void persistModelStateDocs(String jobId, String snapshotId, int numDocs) bulkRequest.add(indexRequest); } - BulkResponse bulkResponse = client().execute(BulkAction.INSTANCE, bulkRequest).actionGet(); + BulkResponse bulkResponse = client().execute(TransportBulkAction.TYPE, bulkRequest).actionGet(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index aa85f78355fb3..2e16436736e89 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.PlainActionFuture; @@ -144,7 +144,7 @@ private void persistModelStateDocs(String jobId, String snapshotId, int numDocs) bulkRequest.add(indexRequest); } - 
diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
index 9daf353b11380..6572a6c286519 100644
--- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
+++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java
@@ -10,8 +10,8 @@
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
@@ -242,7 +242,7 @@ public void testAliasesMovedFromOldToNew() throws Exception {
             )
             .alias(new Alias(AnnotationIndex.READ_ALIAS_NAME).isHidden(true))
             .alias(new Alias(AnnotationIndex.WRITE_ALIAS_NAME).isHidden(true));
-        client().execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
+        client().execute(TransportCreateIndexAction.TYPE, createIndexRequest).actionGet();

         // Because the old annotations index name began with .ml, it will trigger the new annotations index to be created.
         // When this happens the read alias should be changed to cover both indices, and the write alias should be
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
index ceae2a680feb0..4437a36318452 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
@@ -8,9 +8,9 @@
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -94,22 +94,28 @@ private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) {
         BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
         bulkRequestBuilder.add(deleteRequest);
         bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-        executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), new ActionListener<BulkResponse>() {
-            @Override
-            public void onResponse(BulkResponse bulkResponse) {
-                if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) {
-                    listener.onFailure(
-                        new ResourceNotFoundException("Could not delete filter with ID [" + filterId + "] because it does not exist")
-                    );
-                } else {
-                    listener.onResponse(AcknowledgedResponse.TRUE);
+        executeAsyncWithOrigin(
+            client,
+            ML_ORIGIN,
+            TransportBulkAction.TYPE,
+            bulkRequestBuilder.request(),
+            new ActionListener<BulkResponse>() {
+                @Override
+                public void onResponse(BulkResponse bulkResponse) {
+                    if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) {
+                        listener.onFailure(
+                            new ResourceNotFoundException("Could not delete filter with ID [" + filterId + "] because it does not exist")
+                        );
+                    } else {
+                        listener.onResponse(AcknowledgedResponse.TRUE);
+                    }
                 }
-            }

-            @Override
-            public void onFailure(Exception e) {
-                listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e));
+                @Override
+                public void onFailure(Exception e) {
+                    listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e));
+                }
             }
-        });
+        );
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
index 19f99a329d309..104c92fe5dfd7 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java
@@ -11,8 +11,8 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction;
@@ -361,7 +361,7 @@ private void cancelResetTaskIfExists(String jobId, ActionListener<Boolean> liste
         executeAsyncWithOrigin(
             client,
             ML_ORIGIN,
-            CancelTasksAction.INSTANCE,
+            TransportCancelTasksAction.TYPE,
             cancelTasksRequest,
             ActionListener.wrap(cancelTasksResponse -> listener.onResponse(true), e -> {
                 if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
index d31488cda02bd..e94dae01f236b 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
@@ -7,9 +7,9 @@
 package org.elasticsearch.xpack.ml.action;

 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -97,7 +97,7 @@ protected void doExecute(
         executeAsyncWithOrigin(
             client,
             ML_ORIGIN,
-            BulkAction.INSTANCE,
+            TransportBulkAction.TYPE,
             bulkRequestBuilder.request(),
             new ActionListener<BulkResponse>() {
                 @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java
index 030e25ea7797a..d7071e4f973cc 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java
@@ -11,8 +11,8 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction;
@@ -157,7 +157,7 @@ private void waitExistingResetTaskToComplete(
         getTaskRequest.setTaskId(existingTaskId);
         getTaskRequest.setWaitForCompletion(true);
         getTaskRequest.setTimeout(request.timeout());
-        executeAsyncWithOrigin(client, ML_ORIGIN, GetTaskAction.INSTANCE, getTaskRequest, ActionListener.wrap(getTaskResponse -> {
+        executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetTaskAction.TYPE, getTaskRequest, ActionListener.wrap(getTaskResponse -> {
             TaskResult taskResult = getTaskResponse.getTask();
             if (taskResult.isCompleted()) {
                 listener.onResponse(AcknowledgedResponse.of(true));
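The same substitution applies to the task-management actions used above: CancelTasksAction.INSTANCE becomes TransportCancelTasksAction.TYPE and GetTaskAction.INSTANCE becomes TransportGetTaskAction.TYPE. A minimal sketch of cancelling a task and blocking until it finishes, mirroring the call shape in the hunks above; the method name and reason string are hypothetical:

    import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
    import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
    import org.elasticsearch.client.internal.Client;
    import org.elasticsearch.tasks.TaskId;

    public class CancelTaskSketch {
        static void cancelAndWait(Client client, TaskId taskId) {
            CancelTasksRequest request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("no longer needed");
            // Block until the task has actually been cancelled.
            request.setWaitForCompletion(true);
            client.admin().cluster().execute(TransportCancelTasksAction.TYPE, request).actionGet();
        }
    }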
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java
index c01c1f46b3d13..9d5abbfc06f65 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java
@@ -10,8 +10,8 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.client.internal.Client;
@@ -215,7 +215,7 @@ private void isBlocked(Job job, RevertModelSnapshotAction.Request request, Actio
         executeAsyncWithOrigin(
             client,
             ML_ORIGIN,
-            GetTaskAction.INSTANCE,
+            TransportGetTaskAction.TYPE,
             getTaskRequest,
             ActionListener.wrap(r -> listener.onResponse(r.getTask().isCompleted() == false), e -> {
                 if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
index 097be745996ab..a2bb420c1e705 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
@@ -10,9 +10,9 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -114,16 +114,22 @@ private void indexModelSnapshot(Result<ModelSnapshot> modelSnapshot, Consumer<Boolean> handler, Consumer<Exception> errorHandler) {
-        executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), new ActionListener<BulkResponse>() {
-            @Override
-            public void onResponse(BulkResponse indexResponse) {
-                handler.accept(true);
-            }
+        executeAsyncWithOrigin(
+            client,
+            ML_ORIGIN,
+            TransportBulkAction.TYPE,
+            bulkRequestBuilder.request(),
+            new ActionListener<BulkResponse>() {
+                @Override
+                public void onResponse(BulkResponse indexResponse) {
+                    handler.accept(true);
+                }

-            @Override
-            public void onFailure(Exception e) {
-                errorHandler.accept(e);
+                @Override
+                public void onFailure(Exception e) {
+                    errorHandler.accept(e);
+                }
             }
-        });
+        );
     }
 }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java
index 8623f456b2035..280984feab4d4 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java
@@ -9,9 +9,9 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction;
@@ -115,7 +115,7 @@ public static void createDestinationIndex(
             analyticsConfig.getHeaders(),
             ClientHelper.ML_ORIGIN,
             client,
-            CreateIndexAction.INSTANCE,
+            TransportCreateIndexAction.TYPE,
             createIndexRequest,
             listener
         );
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java
index b502e0d6db341..b9b38cb07fa39 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java
@@ -16,10 +16,10 @@
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.TransportIndexAction;
 import org.elasticsearch.action.search.MultiSearchRequest;
@@ -524,7 +524,7 @@ private void storeTrainedModelAndDefinition(
             wrappedListener.onResponse(true);
         }, wrappedListener::onFailure);

-        executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequest.request(), bulkResponseActionListener);
+        executeAsyncWithOrigin(client, ML_ORIGIN, TransportBulkAction.TYPE, bulkRequest.request(), bulkResponseActionListener);
     }

     /**
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java
index f8f1e95fecd2e..1abb466a20f1a 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java
@@ -21,10 +21,10 @@
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.search.MultiSearchRequest;
@@ -1940,7 +1940,7 @@ public void removeJobFromCalendars(String jobId, ActionListener<Boolean> listene
             bulkUpdate.add(updateRequest);
         }
         if (bulkUpdate.numberOfActions() > 0) {
-            executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkUpdate.request(), updateCalendarsListener);
+            executeAsyncWithOrigin(client, ML_ORIGIN, TransportBulkAction.TYPE, bulkUpdate.request(), updateCalendarsListener);
         } else {
             listener.onResponse(true);
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
index 5630f16e63351..82d19f9d72273 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java
@@ -11,10 +11,10 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
@@ -218,7 +218,7 @@ public BulkResponse bulkIndexWithHeadersWithRetry(
             headers,
             ClientHelper.ML_ORIGIN,
             client,
-            BulkAction.INSTANCE,
+            TransportBulkAction.TYPE,
             providedBulkRequest,
             listener
         )
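All of the ML call sites above go through executeAsyncWithOrigin, which runs the action under a stashed origin context; only the ActionType argument changed in this patch. A minimal sketch of that idiom, assuming the usual static imports from ClientHelper (an assumption, matching the ML_ORIGIN references in the hunks above) and a prepared BulkRequest; the listener bodies are illustrative:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.TransportBulkAction;
    import org.elasticsearch.client.internal.Client;

    import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
    import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;

    public class OriginBulkSketch {
        static void persist(Client client, BulkRequest bulkRequest) {
            executeAsyncWithOrigin(
                client,
                ML_ORIGIN,
                TransportBulkAction.TYPE,   // was BulkAction.INSTANCE
                bulkRequest,
                ActionListener.wrap(
                    bulkResponse -> { /* inspect per-item failures here */ },
                    e -> { /* surface the error to the caller */ }
                )
            );
        }
    }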
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java
index 9402a358dc305..ac71ab2b59865 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java
@@ -10,8 +10,8 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
 import org.elasticsearch.action.support.PlainActionFuture;
@@ -73,7 +73,7 @@ public void testCancelDownloadTaskCallsOnFailureWithErrorWhenCancellingFailsWith
             listener.onFailure(new Exception("cancel error"));

             return Void.TYPE;
-        }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any());
+        }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any());

         var listener = new PlainActionFuture<ListTasksResponse>();
@@ -94,7 +94,7 @@ public void testCancelDownloadTaskCallsOnResponseNullWhenTheTaskNoLongerExistsWh
             listener.onFailure(new ResourceNotFoundException("task no longer there"));

             return Void.TYPE;
-        }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any());
+        }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any());

         var listener = new PlainActionFuture<ListTasksResponse>();
@@ -150,6 +150,6 @@ private static void mockCancelTasksResponse(Client client, ListTasksResponse res
         listener.onResponse(response);

         return Void.TYPE;
-    }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any());
+    }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any());
 }
 }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java
index b19b27785a539..075c10ac6dc90 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java
@@ -9,10 +9,10 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.client.internal.Client;
@@ -88,14 +88,14 @@ public void verifyNoMoreInteractionsWithMocks() {

     public void testPersistAnnotation_Create() throws IOException {
         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         AnnotationPersister persister = new AnnotationPersister(resultsPersisterService);
         Annotation annotation = AnnotationTests.randomAnnotation(JOB_ID);
         Tuple<String, Annotation> result = persister.persistAnnotation(null, annotation);
         assertThat(result, is(equalTo(tuple(ANNOTATION_ID, annotation))));

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         List<BulkRequest> bulkRequests = bulkRequestCaptor.getAllValues();
         assertThat(bulkRequests, hasSize(1));
@@ -111,14 +111,14 @@ public void testPersistAnnotation_Create() throws IOException {

     public void testPersistAnnotation_Update() throws IOException {
         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         AnnotationPersister persister = new AnnotationPersister(resultsPersisterService);
         Annotation annotation = AnnotationTests.randomAnnotation(JOB_ID);
         Tuple<String, Annotation> result = persister.persistAnnotation(ANNOTATION_ID, annotation);
         assertThat(result, is(equalTo(tuple(ANNOTATION_ID, annotation))));

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         List<BulkRequest> bulkRequests = bulkRequestCaptor.getAllValues();
         assertThat(bulkRequests, hasSize(1));
@@ -134,7 +134,7 @@ public void testPersistAnnotation_Update() throws IOException {

     public void testPersistMultipleAnnotationsWithBulk() {
         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         AnnotationPersister persister = new AnnotationPersister(resultsPersisterService);
         persister.bulkPersisterBuilder(JOB_ID)
@@ -145,7 +145,7 @@ public void testPersistMultipleAnnotationsWithBulk() {
             .persistAnnotation(AnnotationTests.randomAnnotation(JOB_ID))
             .executeRequest();

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         List<BulkRequest> bulkRequests = bulkRequestCaptor.getAllValues();
         assertThat(bulkRequests, hasSize(1));
@@ -154,7 +154,7 @@ public void testPersistMultipleAnnotationsWithBulk_LowBulkLimit() {
         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         AnnotationPersister persister = new AnnotationPersister(resultsPersisterService, 2);
         persister.bulkPersisterBuilder(JOB_ID)
@@ -165,7 +165,7 @@ public void testPersistMultipleAnnotationsWithBulk_LowBulkLimit() {
             .persistAnnotation(AnnotationTests.randomAnnotation(JOB_ID))
             .executeRequest();

-        verify(client, times(3)).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client, times(3)).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         List<BulkRequest> bulkRequests = bulkRequestCaptor.getAllValues();
         assertThat(bulkRequests, hasSize(3));
@@ -184,7 +184,7 @@ public void testPersistMultipleAnnotationsWithBulk_Failure() {
             .doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess("1"), bulkItemFailure("2") }, 0L))) // (2)
             .doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemFailure("2") }, 0L))) // (3)
             .when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         AnnotationPersister persister = new AnnotationPersister(resultsPersisterService);
         AnnotationPersister.Builder persisterBuilder = persister.bulkPersisterBuilder(JOB_ID)
@@ -193,7 +193,7 @@ public void testPersistMultipleAnnotationsWithBulk_Failure() {
         ElasticsearchException e = expectThrows(ElasticsearchException.class, persisterBuilder::executeRequest);
         assertThat(e.getMessage(), containsString("Failed execution"));

-        verify(client, atLeastOnce()).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client, atLeastOnce()).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         List<BulkRequest> bulkRequests = bulkRequestCaptor.getAllValues();
         assertThat(bulkRequests.get(0).numberOfActions(), equalTo(2)); // Original bulk request of size 2
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java
index e98c8a10b577f..1d52f278323dd 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java
@@ -9,10 +9,10 @@
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.client.internal.Client;
@@ -166,7 +166,7 @@ public void setup() throws Exception {
         when(client.execute(same(FlushJobAction.INSTANCE), flushJobRequests.capture())).thenReturn(flushJobFuture);

         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(annotationDocId) }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());
     }

     public void testLookBackRunWithEndTime() throws Exception {
@@ -334,7 +334,7 @@ public void testRealtimeRun() throws Exception {
         );

         ArgumentCaptor<BulkRequest> bulkRequestArgumentCaptor = ArgumentCaptor.forClass(BulkRequest.class);
-        verify(client, atMost(2)).execute(eq(BulkAction.INSTANCE), bulkRequestArgumentCaptor.capture(), any());
+        verify(client, atMost(2)).execute(eq(TransportBulkAction.TYPE), bulkRequestArgumentCaptor.capture(), any());
         BulkRequest bulkRequest = bulkRequestArgumentCaptor.getValue();
         assertThat(bulkRequest.requests(), hasSize(1));
         IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(0);
@@ -383,7 +383,7 @@ public void testRealtimeRun() throws Exception {
         );

         ArgumentCaptor<BulkRequest> bulkRequestArgumentCaptor = ArgumentCaptor.forClass(BulkRequest.class);
-        verify(client, atMost(2)).execute(eq(BulkAction.INSTANCE), bulkRequestArgumentCaptor.capture(), any());
+        verify(client, atMost(2)).execute(eq(TransportBulkAction.TYPE), bulkRequestArgumentCaptor.capture(), any());
         BulkRequest bulkRequest = bulkRequestArgumentCaptor.getValue();
         assertThat(bulkRequest.requests(), hasSize(1));
         IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(0);
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java
index 998edd6044bab..2f3ccaa313b0d 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java
@@ -8,8 +8,8 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
@@ -113,7 +113,7 @@ private Map<String, Object> testCreateDestinationIndex(DataFrameAnalysis analysi
         ArgumentCaptor<CreateIndexRequest> createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class);
         doAnswer(callListenerOnResponse(null)).when(client)
-            .execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any());
+            .execute(eq(TransportCreateIndexAction.TYPE), createIndexRequestCaptor.capture(), any());

         Map<String, String> analysisSettings1 = Map.ofEntries(
             Map.entry("index.analysis.filter.bigram_joiner.max_shingle_size", "2"),
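The test changes above keep the Mockito structure intact and only swap the matcher target: stubs and verifications are keyed on the ActionType constant with eq(...) (or same(...), which is equivalent for a singleton). A minimal sketch of the stub-and-verify pattern these tests use, assuming a mocked internal Client; the canned response is illustrative:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.action.bulk.TransportBulkAction;
    import org.elasticsearch.client.internal.Client;

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.ArgumentMatchers.eq;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;

    public class MockClientSketch {
        static Client mockBulkClient() {
            Client client = mock(Client.class);
            // Whenever the bulk ActionType is executed, complete the listener
            // (third argument) with a canned response.
            doAnswer(invocation -> {
                ActionListener<BulkResponse> listener = invocation.getArgument(2);
                listener.onResponse(mock(BulkResponse.class));
                return null;
            }).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());
            return client;
        }

        static void assertBulkExecuted(Client client) {
            // Verification is a plain equality check against the same constant.
            verify(client).execute(eq(TransportBulkAction.TYPE), any(), any());
        }
    }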
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java
index 94e0c533ef5fc..3daeed561e88b 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java
@@ -8,8 +8,8 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.TransportIndexAction;
 import org.elasticsearch.action.support.PlainActionFuture;
@@ -548,7 +548,7 @@ private void assertThatIndexRequestHasOperation(Client client, DocWriteRequest.O
     private void assertThatBulkIndexRequestHasOperation(Client client, DocWriteRequest.OpType operation) {
         var bulkIndexRequestArg = ArgumentCaptor.forClass(BulkRequest.class);
-        verify(client).execute(eq(BulkAction.INSTANCE), bulkIndexRequestArg.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkIndexRequestArg.capture(), any());

         var requests = bulkIndexRequestArg.getValue().requests();
         assertThat(bulkIndexRequestArg.getValue().requests().size(), Matchers.greaterThan(0));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java
index 654ce7bf965bd..0a7fc75115d2a 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java
@@ -8,9 +8,9 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteResponse;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.index.TransportIndexAction;
@@ -85,7 +85,7 @@ public class JobResultsPersisterTests extends ESTestCase {
     public void setUpTests() {
         bulkRequestCaptor = ArgumentCaptor.forClass(BulkRequest.class);
         client = mock(Client.class);
-        doAnswer(withResponse(mock(BulkResponse.class))).when(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        doAnswer(withResponse(mock(BulkResponse.class))).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());
         OriginSettingClient originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN);
         persister = new JobResultsPersister(originSettingClient, buildResultsPersisterService(originSettingClient));
     }
@@ -111,7 +111,7 @@ public void testPersistBucket_OneRecord() {
         persister.bulkPersisterBuilder(JOB_ID).persistBucket(bucket).executeRequest();

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         BulkRequest bulkRequest = bulkRequestCaptor.getValue();
         assertEquals(2, bulkRequest.numberOfActions());
@@ -162,7 +162,7 @@ public void testPersistRecords() {
         persister.bulkPersisterBuilder(JOB_ID).persistRecords(records).executeRequest();

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         BulkRequest bulkRequest = bulkRequestCaptor.getValue();
         assertEquals(1, bulkRequest.numberOfActions());
@@ -197,7 +197,7 @@ public void testPersistInfluencers() {
         persister.bulkPersisterBuilder(JOB_ID).persistInfluencers(influencers).executeRequest();

-        verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         BulkRequest bulkRequest = bulkRequestCaptor.getValue();
         assertEquals(1, bulkRequest.numberOfActions());
@@ -233,7 +233,7 @@ public void testBulkRequestExecutesWhenReachMaxDocs() {
         InOrder inOrder = inOrder(client);
         inOrder.verify(client).settings();
         inOrder.verify(client, times(3)).threadPool();
-        inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         verifyNoMoreInteractions(client);
     }
@@ -252,7 +252,7 @@ public void testPersistTimingStats() {
         InOrder inOrder = inOrder(client);
         inOrder.verify(client).settings();
         inOrder.verify(client, times(3)).threadPool();
-        inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         verifyNoMoreInteractions(client);

         BulkRequest bulkRequest = bulkRequestCaptor.getValue();
@@ -302,7 +302,7 @@ public void testPersistDatafeedTimingStats() {
         InOrder inOrder = inOrder(client);
         inOrder.verify(client).settings();
         inOrder.verify(client, times(3)).threadPool();
-        inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         verifyNoMoreInteractions(client);

         // Refresh policy is set on the bulk request, not the individual index requests
@@ -342,7 +342,7 @@ private void testPersistQuantilesSync(SearchHits searchHits, String expectedInde
         InOrder inOrder = inOrder(client);
         inOrder.verify(client).execute(eq(TransportSearchAction.TYPE), any(), any());
-        inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any());
+        inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any());
         inOrder.verifyNoMoreInteractions();

         BulkRequest bulkRequest = bulkRequestCaptor.getValue();
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java
index f8ffed0864372..7aaeabac3af8b 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java
@@ -10,10 +10,10 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.SearchRequest;
@@ -263,7 +263,7 @@ public void testBulkRequestChangeOnFailures() {
         doAnswerWithResponses(
             new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L),
             new BulkResponse(new BulkItemResponse[0], 0L)
-        ).when(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());

         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(INDEX_REQUEST_FAILURE);
@@ -274,7 +274,7 @@ public void testBulkRequestChangeOnFailures() {
         resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set);

         ArgumentCaptor<BulkRequest> captor = ArgumentCaptor.forClass(BulkRequest.class);
-        verify(client, times(2)).execute(eq(BulkAction.INSTANCE), captor.capture(), any());
+        verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), captor.capture(), any());

         List<BulkRequest> requests = captor.getAllValues();
@@ -294,7 +294,7 @@ public void testBulkRequestChangeOnIrrecoverableFailures() {
         doAnswerWithResponses(
             new BulkResponse(new BulkItemResponse[] { irrecoverable, BULK_ITEM_RESPONSE_SUCCESS }, 0L),
             new BulkResponse(new BulkItemResponse[0], 0L)
-        ).when(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());

         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(INDEX_REQUEST_FAILURE);
@@ -305,7 +305,7 @@ public void testBulkRequestChangeOnIrrecoverableFailures() {
             () -> resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, (s) -> {})
         );

-        verify(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        verify(client).execute(eq(TransportBulkAction.TYPE), any(), any());
         assertThat(ex.getMessage(), containsString("experienced failure that cannot be automatically retried."));
     }
@@ -313,7 +313,7 @@ public void testBulkRequestDoesNotRetryWhenSupplierIsFalse() {
         doAnswerWithResponses(
             new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L),
             new BulkResponse(new BulkItemResponse[0], 0L)
-        ).when(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());

         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(INDEX_REQUEST_FAILURE);
@@ -325,7 +325,7 @@ public void testBulkRequestDoesNotRetryWhenSupplierIsFalse() {
             ElasticsearchException.class,
             () -> resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> false, lastMessage::set)
         );
-        verify(client, times(1)).execute(eq(BulkAction.INSTANCE), any(), any());
+        verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), any(), any());
         assertThat(lastMessage.get(), is(nullValue()));
     }
@@ -335,7 +335,7 @@ public void testBulkRequestRetriesConfiguredAttemptNumber() {
         resultsPersisterService.setMaxFailureRetries(maxFailureRetries);

         doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE }, 0L))).when(client)
-            .execute(eq(BulkAction.INSTANCE), any(), any());
+            .execute(eq(TransportBulkAction.TYPE), any(), any());

         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(INDEX_REQUEST_FAILURE);
@@ -346,7 +346,7 @@ public void testBulkRequestRetriesConfiguredAttemptNumber() {
             ElasticsearchException.class,
             () -> resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set)
         );
-        verify(client, times(maxFailureRetries + 1)).execute(eq(BulkAction.INSTANCE), any(), any());
+        verify(client, times(maxFailureRetries + 1)).execute(eq(TransportBulkAction.TYPE), any(), any());

         assertThat(lastMessage.get(), containsString("failed to index after [10] attempts. Will attempt again"));
     }
@@ -355,7 +355,7 @@ public void testBulkRequestRetriesMsgHandlerIsCalled() {
         doAnswerWithResponses(
             new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L),
             new BulkResponse(new BulkItemResponse[0], 0L)
-        ).when(client).execute(eq(BulkAction.INSTANCE), any(), any());
+        ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any());

         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(INDEX_REQUEST_FAILURE);
@@ -366,7 +366,7 @@ public void testBulkRequestRetriesMsgHandlerIsCalled() {
         resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set);

         ArgumentCaptor<BulkRequest> captor = ArgumentCaptor.forClass(BulkRequest.class);
-        verify(client, times(2)).execute(eq(BulkAction.INSTANCE), captor.capture(), any());
+        verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), captor.capture(), any());

         List<BulkRequest> requests = captor.getAllValues();
diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
index 4b7819693aedb..ae1aa7072510d 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java
@@ -11,9 +11,9 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@@ -397,9 +397,8 @@ private ActionResponse verifyIndexInstalled(
         ActionRequest request,
         ActionListener<?> listener
     ) {
-        if (action instanceof CreateIndexAction) {
+        if (action == TransportCreateIndexAction.TYPE) {
             calledTimes.incrementAndGet();
-            assertThat(action, instanceOf(CreateIndexAction.class));
             assertThat(request, instanceOf(CreateIndexRequest.class));
             assertNotNull(listener);
             return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index());
@@ -416,9 +415,8 @@ private ActionResponse verifyIndexUpgraded(
         ActionRequest request,
         ActionListener<?> listener
     ) {
-        if (action instanceof CreateIndexAction) {
+        if (action == TransportCreateIndexAction.TYPE) {
             indicesCreated.incrementAndGet();
-            assertThat(action, instanceOf(CreateIndexAction.class));
             assertThat(request, instanceOf(CreateIndexRequest.class));
             assertNotNull(listener);
             return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index());
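The verifier rewrites above also show why the old instanceOf checks could be dropped: each ActionType constant is a process-wide singleton, so reference equality identifies the action and the separate class assertion becomes redundant. A minimal sketch of the dispatch idiom, with names taken from the hunks above (the null fall-through is illustrative):

    import org.elasticsearch.action.ActionType;
    import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
    import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
    import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;

    public class VerifierSketch {
        // ActionType constants are singletons, so == (or assertSame in tests)
        // replaces the per-action class check that instanceOf used to provide.
        static CreateIndexResponse handle(ActionType<?> action, Object request) {
            if (action == TransportCreateIndexAction.TYPE) {
                return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index());
            }
            return null; // not a create-index call; let other verifiers handle it
        }
    }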
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
index a276971762c81..9c3c34e2d63bd 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java
@@ -13,8 +13,8 @@
 import org.elasticsearch.ResourceAlreadyExistsException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
@@ -165,7 +165,7 @@ static void createIndex(
         }

         client.execute(
-            CreateIndexAction.INSTANCE,
+            TransportCreateIndexAction.TYPE,
             request,
             ActionListener.wrap(createIndexResponse -> startPersistentTask(job, listener, persistentTasksService), e -> {
                 if (e instanceof ResourceAlreadyExistsException) {
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
index b2e1ed42440a2..f4c420db47ac3 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
@@ -11,9 +11,9 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.TransportSearchAction;
 import org.elasticsearch.action.support.broadcast.BroadcastResponse;
@@ -142,7 +142,7 @@ protected void doNextBulk(BulkRequest request, ActionListener<BulkResponse> next
             job.getHeaders(),
             ClientHelper.ROLLUP_ORIGIN,
             client,
-            BulkAction.INSTANCE,
+            TransportBulkAction.TYPE,
             request,
             nextPhase
         );
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java
index b1455c4738623..ee8b4c79d1893 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java
@@ -9,8 +9,8 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ResourceAlreadyExistsException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
@@ -67,12 +67,12 @@ public void testCreateIndexException() {
         doAnswer(invocation -> {
             requestCaptor.getValue().onFailure(new RuntimeException("something bad"));
             return null;
-        }).when(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), requestCaptor.capture());
+        }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(CreateIndexRequest.class), requestCaptor.capture());

         TransportPutRollupJobAction.createIndex(job, testListener, mock(PersistentTasksService.class), client, logger);

         // ResourceAlreadyExists should trigger a GetMapping next
-        verify(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), any());
+        verify(client).execute(eq(TransportCreateIndexAction.TYPE), any(CreateIndexRequest.class), any());
     }

     @SuppressWarnings({ "unchecked", "rawtypes" })
@@ -90,7 +90,7 @@ public void testIndexAlreadyExists() {
         doAnswer(invocation -> {
             requestCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex()));
             return null;
-        }).when(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), requestCaptor.capture());
+        }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(CreateIndexRequest.class), requestCaptor.capture());

         ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass(ActionListener.class);
         doAnswer(invocation -> {
@@ -130,7 +130,7 @@ public void testIndexMetadata() throws InterruptedException {
             listenerCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex()));
             latch.countDown();
             return null;
-        }).when(client).execute(eq(CreateIndexAction.INSTANCE), requestCaptor.capture(), listenerCaptor.capture());
+        }).when(client).execute(eq(TransportCreateIndexAction.TYPE), requestCaptor.capture(), listenerCaptor.capture());

         ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass(ActionListener.class);
         doAnswer(invocation -> {
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java
index 1713be9feac65..a40f21c0de08d 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java
@@ -11,9 +11,9 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.delete.DeleteResponse;
 import org.elasticsearch.action.search.ClosePointInTimeRequest;
@@ -636,7 +636,7 @@ private void handleSearchResponse(SearchResponse searchResponse, RefCounted refs
         if (bulkRequest.numberOfActions() > 0) {
             refs.mustIncRef();
             clientWithOrigin.execute(
-                BulkAction.INSTANCE,
+                TransportBulkAction.TYPE,
                 bulkRequest,
                 ActionListener.releaseAfter(listeners.acquire(bulkResponse -> {
                     for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
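TransportPutRollupJobAction above pairs the new TransportCreateIndexAction.TYPE with an ActionListener.wrap that treats ResourceAlreadyExistsException as a recoverable case. A minimal sketch of that create-if-absent idiom, assuming an available Client; "my-rollup-index" and the Boolean listener are hypothetical, not part of this patch:

    import org.elasticsearch.ResourceAlreadyExistsException;
    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
    import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
    import org.elasticsearch.client.internal.Client;

    public class EnsureIndexSketch {
        static void ensureIndex(Client client, ActionListener<Boolean> listener) {
            // "my-rollup-index" is a placeholder name for illustration only.
            CreateIndexRequest request = new CreateIndexRequest("my-rollup-index");
            client.execute(TransportCreateIndexAction.TYPE, request, ActionListener.wrap(response -> listener.onResponse(true), e -> {
                if (e instanceof ResourceAlreadyExistsException) {
                    // Another node created it first; proceed as success.
                    listener.onResponse(true);
                } else {
                    listener.onFailure(e);
                }
            }));
        }
    }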
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java
index b2417d8f18fa5..fe9c1f37e7d49 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java
@@ -13,8 +13,8 @@
 import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
 import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.get.TransportGetAction;
@@ -211,7 +211,7 @@ public void testCreatingApiKeyWithNoAccess() {
             ElasticsearchSecurityException.class,
             () -> client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue))
                 .execute(
-                    CreateIndexAction.INSTANCE,
+                    TransportCreateIndexAction.TYPE,
                     new CreateIndexRequest(randomFrom(randomAlphaOfLengthBetween(3, 8), SECURITY_MAIN_ALIAS))
                 )
                 .actionGet()
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
index 7a0cb604f4ce9..8324763c57bd2 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java
@@ -12,7 +12,7 @@
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
-import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
+import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.settings.Settings;
@@ -700,7 +700,7 @@ public void testCreateIndexAliasesOnlyPermission() {
                     basicAuthHeaderValue("aliases_only", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)
                 )
             ).admin().indices().prepareCreate("test_1")::get,
-            CreateIndexAction.NAME,
+            TransportCreateIndexAction.TYPE.name(),
             "aliases_only"
         );
     }
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java
index 159228dd1c1b2..a3a4f1b074232 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java
@@ -8,8 +8,8 @@
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.bulk.TransportBulkAction;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.update.TransportUpdateAction;
@@ -55,14 +55,14 @@ public void testIndex() {
         assertThrowsAuthorizationExceptionDefaultUsers(
             prepareIndex("index1").setId("id").setSource("field", "value")::get,
-            BulkAction.NAME + "[s]"
+            TransportBulkAction.NAME + "[s]"
         );

         prepareIndex("test4").setId("id").setSource("field", "value").get();
         // the missing index gets automatically created (user has permissions for that), but indexing fails due to missing authorization
         assertThrowsAuthorizationExceptionDefaultUsers(
             prepareIndex("missing").setId("id").setSource("field", "value")::get,
-            BulkAction.NAME + "[s]"
+            TransportBulkAction.NAME + "[s]"
         );
         ensureGreen();
     }
@@ -72,7 +72,7 @@ public void testDelete() {
         prepareIndex("test1").setId("id").setSource("field", "value").get();
         assertEquals(RestStatus.OK, client().prepareDelete("test1", "id").get().status());

-        assertThrowsAuthorizationExceptionDefaultUsers(client().prepareDelete("index1", "id")::get, BulkAction.NAME + "[s]");
+        assertThrowsAuthorizationExceptionDefaultUsers(client().prepareDelete("index1", "id")::get, TransportBulkAction.NAME + "[s]");

         expectThrows(IndexNotFoundException.class, () -> client().prepareDelete("test4", "id").get());
         ensureGreen();
@@ -130,7 +130,7 @@ public void testBulk() {
         assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(true));
         assertThat(bulkResponse.getItems()[1].getOpType(), equalTo(DocWriteRequest.OpType.INDEX));
         assertThat(bulkResponse.getItems()[1].getFailure().getIndex(), equalTo("index1"));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[1].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[1].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[1].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
@@ -145,7 +145,7 @@ public void testBulk() {
         // the missing index gets automatically created (user has permissions for that), but indexing fails due to missing authorization
         assertThat(bulkResponse.getItems()[3].getFailure().getIndex(), equalTo("missing"));
         assertThat(bulkResponse.getItems()[3].getFailure().getCause(), instanceOf(ElasticsearchSecurityException.class));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[3].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[3].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[3].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
@@ -158,7 +158,7 @@ public void testBulk() {
         assertThat(bulkResponse.getItems()[5].isFailed(), equalTo(true));
         assertThat(bulkResponse.getItems()[5].getOpType(), equalTo(DocWriteRequest.OpType.DELETE));
         assertThat(bulkResponse.getItems()[5].getFailure().getIndex(), equalTo("index1"));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[5].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[5].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[5].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
@@ -171,7 +171,7 @@ public void testBulk() {
         assertThat(bulkResponse.getItems()[7].isFailed(), equalTo(true));
         assertThat(bulkResponse.getItems()[7].getOpType(), equalTo(DocWriteRequest.OpType.DELETE));
         assertThat(bulkResponse.getItems()[7].getFailure().getIndex(), equalTo("missing"));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[7].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
@@ -188,7 +188,7 @@ public void testBulk() {
         assertThat(bulkResponse.getItems()[10].isFailed(), equalTo(true));
         assertThat(bulkResponse.getItems()[10].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
         assertThat(bulkResponse.getItems()[10].getFailure().getIndex(), equalTo("index1"));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[10].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
@@ -203,7 +203,7 @@ public void testBulk() {
         assertThat(bulkResponse.getItems()[12].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE));
         assertThat(bulkResponse.getItems()[12].getFailure().getIndex(), equalTo("missing"));
         assertThat(bulkResponse.getItems()[12].getFailure().getCause(), instanceOf(ElasticsearchSecurityException.class));
-        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), BulkAction.NAME + "[s]");
+        assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), TransportBulkAction.NAME + "[s]");
         assertThat(
             bulkResponse.getItems()[12].getFailure().getCause().getMessage(),
             containsString("[indices:data/write/bulk[s]] is unauthorized")
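The security tests above rely on the action name string rather than the ActionType object, because authorization failures embed that name; shard-level items inside a bulk are authorized under a "[s]"-suffixed name. A small sketch of the relationship between the two spellings, under the assumption (consistent with the hunks above) that TYPE.name() returns the same string as the NAME constant:

    import org.elasticsearch.action.bulk.TransportBulkAction;

    public class ActionNameSketch {
        public static void main(String[] args) {
            // NAME is the wire-level action identifier; TYPE.name() yields the same string,
            // which is why CreateIndexAction.NAME became TransportCreateIndexAction.TYPE.name().
            String bulkName = TransportBulkAction.TYPE.name();       // "indices:data/write/bulk"
            // Shard-level operations within a bulk get a "[s]" suffix:
            String shardBulkName = TransportBulkAction.NAME + "[s]"; // "indices:data/write/bulk[s]"
            System.out.println(bulkName + " / " + shardBulkName);
        }
    }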
unauthorized") @@ -171,7 +171,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[7].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[7].getOpType(), equalTo(DocWriteRequest.OpType.DELETE)); assertThat(bulkResponse.getItems()[7].getFailure().getIndex(), equalTo("missing")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[7].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -188,7 +188,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[10].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[10].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(bulkResponse.getItems()[10].getFailure().getIndex(), equalTo("index1")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[10].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -203,7 +203,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[12].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(bulkResponse.getItems()[12].getFailure().getIndex(), equalTo("missing")); assertThat(bulkResponse.getItems()[12].getFailure().getCause(), instanceOf(ElasticsearchSecurityException.class)); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[12].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 9667fd336112f..ec0e54e96f1af 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -445,7 +444,7 @@ private void createApiKeyAndIndexIt( () -> executeAsyncWithOrigin( client, SECURITY_ORIGIN, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> { assert request.getId().equals(indexResponse.getId()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java index c2d8be1c26629..e59bcff2c9240 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest.OpType; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; @@ -155,7 +154,7 @@ void createToken( executeAsyncWithOrigin( client, SECURITY_ORIGIN, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { assert DocWriteResponse.Result.CREATED == response.getResult() diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 629a1a476995f..194440722545a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -19,7 +19,7 @@ import java.util.function.Consumer; import java.util.function.Predicate; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.action.bulk.TransportBulkAction.LAZY_ROLLOVER_ORIGIN; import static org.elasticsearch.action.support.replication.PostWriteRefresh.POST_WRITE_REFRESH_ORIGIN; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 39d14d6685553..320dd4c6f8e09 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -18,9 +18,9 @@ import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.SimulateBulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportMultiGetAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -258,7 +258,7 @@ static boolean checkSameUserPermissions(String action, TransportRequest request, private static boolean shouldAuthorizeIndexActionNameOnly(String action, TransportRequest request) { switch (action) { - case BulkAction.NAME: + case TransportBulkAction.NAME: case SimulateBulkAction.NAME: case TransportIndexAction.NAME: case TransportDeleteAction.NAME: diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index 9b06fe23b903a..55be659512c52 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BackoffPolicy; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; @@ -748,7 +747,7 @@ void createNewProfile(Subject subject, String uid, ActionListener liste () -> executeAsyncWithOrigin( client, getActionOrigin(), - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> { assert docId.equals(indexResponse.getId()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 57ea876935d39..540a0758db43a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -16,10 +16,10 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -174,7 +174,7 @@ protected void indexRequests.add(indexRequest); final IndexResponse response = new IndexResponse(new ShardId("test", "test", 0), indexRequest.id(), 1, 1, 1, true); listener.onResponse((Response) response); - } else if (BulkAction.NAME.equals(action.name())) { + } else if (TransportBulkAction.NAME.equals(action.name())) { assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequests.add(bulkRequest); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 1754ffe3e9812..d2ca214723416 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -18,11 +18,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import 
org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -261,7 +261,7 @@ public void testCreateApiKeyUsesBulkIndexAction() throws Exception { assertThat(indexRequest.opType(), is(DocWriteRequest.OpType.CREATE)); bulkActionInvoked.set(true); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), any()); service.createApiKey(authentication, createApiKeyRequest, Set.of(), new PlainActionFuture<>()); assertBusy(() -> assertTrue(bulkActionInvoked.get())); } @@ -627,7 +627,7 @@ public void testCreateApiKeyWillCacheOnCreation() { ) ); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), any()); final Cache> apiKeyAuthCache = service.getApiKeyAuthCache(); assertNull(apiKeyAuthCache.get(createApiKeyRequest.getId())); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 8ec06c7273bbd..756d53285a8f6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; @@ -19,7 +19,7 @@ import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.get.TransportMultiGetAction; @@ -134,9 +134,9 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + 
assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(false)); @@ -149,9 +149,9 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(profilingIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(profilingIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(profilingIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(profilingIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(profilingIndex), is(true)); @@ -163,9 +163,9 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(true)); @@ -187,9 +187,9 @@ public void testElasticFleetServerPrivileges() { ).forEach(index -> { final IndexAbstraction dotFleetIndex = mockIndexAbstraction(index); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetIndex), is(true)); - 
assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(dotFleetIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetIndex), is(true)); @@ -202,9 +202,9 @@ public void testElasticFleetServerPrivileges() { final IndexAbstraction dotFleetSecretsIndex = mockIndexAbstraction(".fleet-secrets" + randomAlphaOfLengthBetween(1, 20)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetSecretsIndex), is(false)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetSecretsIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetSecretsIndex), is(false)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(dotFleetSecretsIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(dotFleetSecretsIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); @@ -221,9 +221,9 @@ public void testElasticFleetServerPrivileges() { final IndexAbstraction apmSampledTracesIndex = mockIndexAbstraction("traces-apm.sampled-" + randomAlphaOfLengthBetween(1, 20)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(apmSampledTracesIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(apmSampledTracesIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(apmSampledTracesIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(apmSampledTracesIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); @@ -371,11 +371,11 @@ public void testElasticEnterpriseSearchServerAccount() { ).forEach(index -> { final IndexAbstraction enterpriseSearchIndex = 
mockIndexAbstraction(index); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(enterpriseSearchIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(enterpriseSearchIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 4330dc3171047..861b21403b2b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; @@ -42,12 +42,12 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.BulkShardResponse; import org.elasticsearch.action.bulk.MappingUpdatePerformer; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -1430,14 +1430,14 @@ public void testCreateIndexWithAliasWithoutPermissions() { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); 
assertThrowsAuthorizationException( - () -> authorize(authentication, CreateIndexAction.NAME, request), + () -> authorize(authentication, TransportCreateIndexAction.TYPE.name(), request), TransportIndicesAliasesAction.NAME, "test user" ); verify(auditTrail).accessGranted( eq(requestId), eq(authentication), - eq(CreateIndexAction.NAME), + eq(TransportCreateIndexAction.TYPE.name()), eq(request), authzInfoRoles(new String[] { role.getName() }) ); @@ -1467,12 +1467,12 @@ public void testCreateIndexWithAlias() { roleMap.put("a_all", role); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); - authorize(authentication, CreateIndexAction.NAME, request); + authorize(authentication, TransportCreateIndexAction.TYPE.name(), request); verify(auditTrail).accessGranted( eq(requestId), eq(authentication), - eq(CreateIndexAction.NAME), + eq(TransportCreateIndexAction.TYPE.name()), eq(request), authzInfoRoles(new String[] { role.getName() }) ); @@ -1543,7 +1543,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { RoleDescriptor role = new RoleDescriptor( "some_indices_" + randomAlphaOfLengthBetween(3, 6), null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices(index).privileges(BulkAction.NAME).build() }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(index).privileges(TransportBulkAction.NAME).build() }, null ); User user = new User(randomAlphaOfLengthBetween(6, 8), role.getName()); @@ -2053,7 +2053,10 @@ public void testGrantAllRestrictedUserCannotExecuteOperationAgainstSecurityIndic List> requests = new ArrayList<>(); requests.add( - new Tuple<>(BulkAction.NAME + "[s]", new DeleteRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), "id")) + new Tuple<>( + TransportBulkAction.NAME + "[s]", + new DeleteRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), "id") + ) ); requests.add( new Tuple<>( @@ -2062,7 +2065,7 @@ public void testGrantAllRestrictedUserCannotExecuteOperationAgainstSecurityIndic ) ); requests.add( - new Tuple<>(BulkAction.NAME + "[s]", new IndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) + new Tuple<>(TransportBulkAction.NAME + "[s]", new IndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) ); requests.add( new Tuple<>( @@ -2327,19 +2330,19 @@ public void testSuperusersCannotExecuteWriteOperationAgainstSecurityIndex() { List> requests = new ArrayList<>(); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", createBulkShardRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), DeleteRequest::new) ) ); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", createBulkShardRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), UpdateRequest::new) ) ); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", createBulkShardRequest( randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), (index, id) -> new IndexRequest(index).id(id) @@ -2500,7 +2503,7 @@ public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() { request = mockRequest; } case 3 -> { - action = BulkAction.NAME + "[s]"; + action = TransportBulkAction.NAME + "[s]"; request = createBulkShardRequest("index", (index, id) -> new IndexRequest(index).id(id)); } case 4 -> { @@ -2540,7 +2543,7 @@ public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() { } public void 
testAuthorizationOfSingleActionMultipleIndicesBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items; final DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); // the "good role" authorizes all the bulk items @@ -2788,7 +2791,7 @@ public void testAuthorizationOfSingleActionMultipleIndicesBulkItems() { } public void testAuthorizationOfMultipleActionsSingleIndexBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final AtomicInteger idCounter = new AtomicInteger(); final Set actionTypes = new HashSet<>(); final Set deleteItems = new HashSet<>(); @@ -2929,7 +2932,7 @@ public void testAuthorizationOfMultipleActionsSingleIndexBulkItems() { } public void testAuthorizationOfIndividualIndexAndDeleteBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items = { new BulkItemRequest(1, new DeleteRequest("concrete-index", "c1")), new BulkItemRequest(2, new IndexRequest("concrete-index").id("c2")), @@ -3019,7 +3022,7 @@ public void testAuthorizationOfIndividualIndexAndDeleteBulkItems() { } public void testAuthorizationOfIndividualBulkItemsWithDateMath() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items = { new BulkItemRequest(1, new IndexRequest("").id("dy1")), new BulkItemRequest(2, new DeleteRequest("", "dy2")), // resolves to same as above @@ -3089,7 +3092,7 @@ private static Tuple randomCompositeRequest() { case 0 -> Tuple.tuple(TransportMultiGetAction.NAME, new MultiGetRequest().add("index", "id")); case 1 -> Tuple.tuple(TransportMultiSearchAction.TYPE.name(), new MultiSearchRequest().add(new SearchRequest())); case 2 -> Tuple.tuple(MultiTermVectorsAction.NAME, new MultiTermVectorsRequest().add("index", "id")); - case 3 -> Tuple.tuple(BulkAction.NAME, new BulkRequest().add(new DeleteRequest("index", "id"))); + case 3 -> Tuple.tuple(TransportBulkAction.NAME, new BulkRequest().add(new DeleteRequest("index", "id"))); case 4 -> Tuple.tuple("indices:data/read/mpercolate", new MockCompositeIndicesRequest()); case 5 -> Tuple.tuple("indices:data/read/msearch/template", new MockCompositeIndicesRequest()); case 6 -> Tuple.tuple("indices:data/read/search/template", new MockCompositeIndicesRequest()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java index 54ef4a19e182d..a2b6bf0c33dc8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java @@ -30,7 +30,7 @@ import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.hamcrest.Matchers.is; /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 4ad7c61d45d63..39eda23e35eec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -2132,7 +2132,8 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { is(false == emptyRemoteRole) ); assertThat( - role.authorize(CreateIndexAction.NAME, Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache).isGranted(), + role.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache) + .isGranted(), is(false) ); assertThat( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index 0760eeafc2f77..11b8598768667 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; @@ -623,7 +623,7 @@ public void testSecurityProfileOrigin() { final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(expectedException); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), anyActionListener()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), anyActionListener()); final PlainActionFuture future1 = new PlainActionFuture<>(); profileService.activateProfile(AuthenticationTestHelper.builder().realm().build(), future1); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java index 082b097df684b..750fdd40c12d6 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.slm.history; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -137,7 +137,7 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { - if (action instanceof CreateIndexAction && request instanceof CreateIndexRequest) { + if (action == TransportCreateIndexAction.TYPE && request instanceof CreateIndexRequest) { return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); } calledTimes.incrementAndGet(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java index 43c72642c84a4..a3dd8cdf5bcbb 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.spatial; import org.apache.lucene.tests.geo.GeoTestUtil; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageResponse; +import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -89,7 +89,7 @@ private void doTestSpatialField(String type) throws Exception { prepareIndex(index).setId("id-" + i).setSource(doc).get(); } AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { index }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java index e3d9fa3aff671..79b9458be4ed2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java @@ -14,8 +14,8 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -191,7 +191,7 @@ 
static void createDestinationIndex( config.getHeaders(), TRANSFORM_ORIGIN, client, - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, request, ActionListener.wrap(createIndexResponse -> { listener.onResponse(true); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index c68c73fd71d9e..66c618bc07c46 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -14,10 +14,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.SearchRequest; @@ -170,7 +170,7 @@ protected void doNextBulk(BulkRequest request, ActionListener next transformConfig.getHeaders(), ClientHelper.TRANSFORM_ORIGIN, client, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, request, ActionListener.wrap(bulkResponse -> handleBulkResponse(bulkResponse, nextPhase), nextPhase::onFailure) ); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java index 87b65978f667e..ce43a353ff414 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -151,7 +151,7 @@ public void testCreateDestinationIndex() throws IOException { ); ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); - verify(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportCreateIndexAction.TYPE), createIndexRequestCaptor.capture(), any()); verify(client, atLeastOnce()).threadPool(); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index b75ac51c3510f..776f649300aa4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -11,11 +11,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor2; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchResponse; @@ -472,7 +472,7 @@ public void testPutTriggeredWatches() throws Exception { listener.onResponse(new BulkResponse(bulkItemResponse, 123)); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); BulkResponse response = triggeredWatchStore.putAll(triggeredWatches); assertThat(response.hasFailures(), is(false)); From 64847b39d2ea9c3b3a7157c84a5e8dc9b23ce219 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sun, 31 Mar 2024 21:51:30 +0100 Subject: [PATCH 022/264] AwaitsFix for #104081 --- .../xpack/ml/integration/MlDistributedFailureIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 33fd7c108863b..a2b00974d4038 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -144,6 +144,7 @@ public void testLoseDedicatedMasterNode() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104081") public void testFullClusterRestart() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); ensureStableCluster(); From b643abab3b9749cf32ad3561de9b2233abaa978a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 1 Apr 2024 01:41:52 +0200 Subject: [PATCH 023/264] Harden usage of XML document builder in build-conventions (#106874) While `LicenseHeadersTask` only uses `DocumentBuilderFactory` internally to parse `rat.xml` files (which are the output of running Apache RAT on Elasticsearch codebase files), it is a good practice to disable XXE features even if it's part of checks that are run on developers' machines.
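As an illustrative sketch only (this is not code from the patch; the class name and `main` harness are made up for the example), a `DocumentBuilderFactory` hardened along these lines with standard JAXP switches looks like:

import java.io.File;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

public final class SafeXmlParseSketch {

    // Parse an XML file with DOCTYPEs rejected and all external fetching disabled.
    static Document parse(File xmlFile) throws Exception {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        // Rejecting DOCTYPE declarations outright defeats the classic XXE payloads;
        // a rat.xml report never needs a DTD anyway.
        dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        // Defence in depth: even if a DTD were allowed, never expand external
        // entities and never fetch external DTDs or schemas.
        dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
        dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
        dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
        dbf.setExpandEntityReferences(false);
        return dbf.newDocumentBuilder().parse(xmlFile);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(parse(new File(args[0])).getDocumentElement().getNodeName());
    }
}

The change below applies the same switches inside `LicenseHeadersTask` itself.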
--- .../precommit/LicenseHeadersTask.java | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java index 76b117f8f5308..e49feb4289586 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java @@ -25,6 +25,7 @@ import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.IgnoreEmptyDirectories; @@ -39,13 +40,12 @@ import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; -import org.gradle.api.model.ObjectFactory; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; + import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.io.Serializable; import java.io.Writer; import java.nio.file.Files; import java.util.ArrayList; @@ -53,7 +53,9 @@ import java.util.List; import java.util.stream.Collectors; import javax.inject.Inject; -import java.io.Serializable; +import javax.xml.XMLConstants; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; /** * Checks files for license headers.. @@ -232,7 +234,7 @@ private ClaimStatistic toXmlReportFile(ReportConfiguration config, Writer writer private static List unapprovedFiles(File xmlReportFile) { try { - NodeList resourcesNodes = DocumentBuilderFactory.newInstance() + NodeList resourcesNodes = createXmlDocumentBuilderFactory() .newDocumentBuilder() .parse(xmlReportFile) .getElementsByTagName("resource"); @@ -249,6 +251,21 @@ private static List unapprovedFiles(File xmlReportFile) { } } + private static DocumentBuilderFactory createXmlDocumentBuilderFactory() throws ParserConfigurationException { + final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + dbf.setXIncludeAware(false); + dbf.setIgnoringComments(true); + dbf.setExpandEntityReferences(false); + dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); + dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, ""); + dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); + dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + dbf.setFeature("http://xml.org/sax/features/external-general-entities", false); + dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); + return dbf; + } + private static List elementList(NodeList resourcesNodes) { List nodeList = new ArrayList<>(resourcesNodes.getLength()); for (int idx = 0; idx < resourcesNodes.getLength(); idx++) { From de6ece610e6abdfb288803a8ac7bcbc2e976fbc0 Mon Sep 17 00:00:00 2001 From: William Brafford Date: Sun, 31 Mar 2024 20:57:25 -0400 Subject: [PATCH 024/264] Check that operations on the Kibana system index use system index thread pools (#106915) * Add test to verify Kibana's access to system thread pools * Block all relevant 
threads on any number of nodes * Fill threadpool queues before testing requests that should be blocked --- .../kibana/KibanaThreadPoolTests.java | 52 +++++++++ .../indices/SystemIndexThreadPoolTests.java | 108 ++++++++++++++++++ 2 files changed, 160 insertions(+) create mode 100644 modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java create mode 100644 test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java new file mode 100644 index 0000000000000..0974fd6d36b18 --- /dev/null +++ b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.kibana; + +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.SystemIndexThreadPoolTests; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class KibanaThreadPoolTests extends SystemIndexThreadPoolTests { + + @Override + protected Collection> nodePlugins() { + return Set.of(KibanaPlugin.class); + } + + public void testKibanaThreadPool() { + runWithBlockedThreadPools(() -> { + // index documents + String idToDelete = client().prepareIndex(".kibana").setSource(Map.of("foo", "delete me!")).get().getId(); + String idToUpdate = client().prepareIndex(".kibana").setSource(Map.of("foo", "update me!")).get().getId(); + + // bulk index, delete, and update + Client bulkClient = client(); + BulkResponse response = bulkClient.prepareBulk(".kibana") + .add(bulkClient.prepareIndex(".kibana").setSource(Map.of("foo", "search me!"))) + .add(bulkClient.prepareDelete(".kibana", idToDelete)) + .add(bulkClient.prepareUpdate().setId(idToUpdate).setDoc(Map.of("foo", "I'm updated!"))) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + assertNoFailures(response); + + // match-all search + assertHitCount(client().prepareSearch(".kibana").setQuery(QueryBuilders.matchAllQuery()), 2); + }); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java new file mode 100644 index 0000000000000..b97c39ce70792 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.indices; + +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Phaser; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.startsWith; + +/** + * Tests to verify that system indices are bypassing user-space thread pools + * + *
<p>We can block thread pools by setting them to one thread and no queue, then submitting + * threads that wait on a countdown latch. This lets us verify that operations on system indices + * are being directed to other thread pools.</p> + * + * <p>When implementing this class, don't forget to override {@link ESIntegTestCase#nodePlugins()} if + * the relevant system index is defined in a plugin.</p>
+ */ +public abstract class SystemIndexThreadPoolTests extends ESIntegTestCase { + + private static final String USER_INDEX = "user_index"; + + // For system indices that use ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS, we'll want to + // block normal system index thread pools as well. + protected Set threadPoolsToBlock() { + return Set.of(ThreadPool.Names.GET, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH); + } + + protected void runWithBlockedThreadPools(Runnable runnable) { + Phaser phaser = new Phaser(); + Runnable waitAction = () -> { + phaser.arriveAndAwaitAdvance(); + phaser.arriveAndAwaitAdvance(); + }; + phaser.register(); // register this test's thread + + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : threadPoolsToBlock()) { + ThreadPool.Info info = threadPool.info(threadPoolName); + phaser.bulkRegister(info.getMax()); + for (int i = 0; i < info.getMax(); i++) { + threadPool.executor(threadPoolName).submit(waitAction); + } + } + } + phaser.arriveAndAwaitAdvance(); + try { + runnable.run(); + } finally { + phaser.arriveAndAwaitAdvance(); + } + } + + public void testUserThreadPoolsAreBlocked() { + assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); + + runWithBlockedThreadPools(this::assertThreadPoolsBlocked); + + assertAcked(client().admin().indices().prepareDelete(USER_INDEX)); + } + + private void assertThreadPoolsBlocked() { + fillThreadPoolQueues(); // rejections are easier to check than timeouts + + var e1 = expectThrows( + EsRejectedExecutionException.class, + () -> client().prepareIndex(USER_INDEX).setSource(Map.of("foo", "bar")).get() + ); + assertThat(e1.getMessage(), startsWith("rejected execution of TimedRunnable")); + var e2 = expectThrows(EsRejectedExecutionException.class, () -> client().prepareGet(USER_INDEX, "id").get()); + assertThat(e2.getMessage(), startsWith("rejected execution of ActionRunnable")); + var e3 = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(USER_INDEX).setQuery(QueryBuilders.matchAllQuery()).get() + ); + assertThat(e3.getMessage(), startsWith("all shards failed")); + } + + private void fillThreadPoolQueues() { + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : threadPoolsToBlock()) { + ThreadPool.Info info = threadPool.info(threadPoolName); + + // fill up the queue + for (int i = 0; i < info.getQueueSize().singles(); i++) { + threadPool.executor(threadPoolName).submit(() -> {}); + } + } + } + } +} From 8644d3d11a18581cb244a674f52957bf05a51048 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 1 Apr 2024 14:23:01 +0200 Subject: [PATCH 025/264] Remove unused o.e.i.m.extras.RankFeatureQueryBuilders (#106951) This is unused and the last commit referencing it was in 21. 
--- .../extras/RankFeatureQueryBuilders.java | 65 ------------------- 1 file changed, 65 deletions(-) delete mode 100644 modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java deleted file mode 100644 index 649685667b2fe..0000000000000 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.index.mapper.extras; - -public final class RankFeatureQueryBuilders { - private RankFeatureQueryBuilders() {} - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * S / (S + pivot)} where S is the value of the static feature. - * @param fieldName field that stores features - * @param pivot feature value that would give a score contribution equal to weight/2, must be in (0, +Infinity) - */ - public static RankFeatureQueryBuilder saturation(String fieldName, float pivot) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Saturation(pivot)); - } - - /** - * Same as {@link #saturation(String, float)} but a reasonably good default pivot value - * is computed based on index statistics and is approximately equal to the geometric mean of all - * values that exist in the index. - * @param fieldName field that stores features - */ - public static RankFeatureQueryBuilder saturation(String fieldName) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Saturation()); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * Math.log(scalingFactor + S)} where S is the value of the static feature. - * @param fieldName field that stores features - * @param scalingFactor scaling factor applied before taking the logarithm, must be in [1, +Infinity) - */ - public static RankFeatureQueryBuilder log(String fieldName, float scalingFactor) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Log(scalingFactor)); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * S^a / (S^a + pivot^a)} where S is the value of the static feature. - * @param fieldName field that stores features - * @param pivot feature value that would give a score contribution equal to weight/2, must be in (0, +Infinity) - * @param exp exponent, higher values make the function grow slower before 'pivot' and faster after 'pivot', - * must be in (0, +Infinity) - */ - public static RankFeatureQueryBuilder sigmoid(String fieldName, float pivot, float exp) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Sigmoid(pivot, exp)); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code S)} where S is the indexed value of the static feature. 
- * @param fieldName field that stores features - */ - public static RankFeatureQueryBuilder linear(String fieldName) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Linear()); - } - -} From 89bf4b33e853bec1d9ce5a05b8d1bd57dd8242f0 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 1 Apr 2024 08:23:32 -0400 Subject: [PATCH 026/264] Make int8_hnsw our default index for new dense-vector fields (#106836) For float32, there is no compelling reason to use all the memory required by default for HNSW. Using `int8_hnsw` provides a much saner default when it comes to cost vs relevancy. So, on all new indices that use `dense_vector` and want to index them for fast search, we will default to `int8_hnsw`. Users can still customize their parameters, or prefer `hnsw` over float32 if they so desire. --- docs/changelog/106836.yaml | 5 ++ .../mapping/types/dense-vector.asciidoc | 10 ++-- .../search-your-data/knn-search.asciidoc | 7 ++- .../test/old_cluster/30_vector_search.yml | 4 ++ .../search.vectors/100_knn_nested_search.yml | 4 ++ .../120_knn_query_multiple_shards.yml | 4 ++ .../140_knn_query_with_other_queries.yml | 4 ++ .../160_knn_query_missing_params.yml | 8 ++++ .../test/search.vectors/40_knn_search.yml | 12 +++++ .../search.vectors/40_knn_search_cosine.yml | 20 ++++++++ .../60_knn_search_filter_alias.yml | 4 ++ .../80_dense_vector_indexed_by_default.yml | 38 ++++++++++----- .../elasticsearch/index/IndexVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 48 ++++++++++++------- .../vectors/DenseVectorFieldMapperTests.java | 10 +++- .../xpack/rank/rrf/RRFRankSingleShardIT.java | 9 ++++ .../rest-api-spec/test/rrf/100_rank_rrf.yml | 4 ++ .../test/rrf/200_rank_rrf_script.yml | 8 ++++ .../test/rrf/300_rrf_retriever.yml | 4 ++ .../test/rrf/400_rrf_retriever_script.yml | 8 ++++ 20 files changed, 179 insertions(+), 33 deletions(-) create mode 100644 docs/changelog/106836.yaml diff --git a/docs/changelog/106836.yaml b/docs/changelog/106836.yaml new file mode 100644 index 0000000000000..f561f44d9bb2d --- /dev/null +++ b/docs/changelog/106836.yaml @@ -0,0 +1,5 @@ +pr: 106836 +summary: Make int8_hnsw our default index for new dense-vector fields +area: Mapping +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index cec41eab41238..14fe9d4963970 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -65,7 +65,7 @@ data structure to support fast kNN retrieval through the <> `float` vectors. Currently the only quantization method supported is `int8` and provided vectors `element_type` must be `float`. To use -a quantized index, you can set your index type to `int8_hnsw`. +a quantized index, you can set your index type to `int8_hnsw`. When indexing `float` vectors, the current default +index type is `int8_hnsw`. When using the `int8_hnsw` index, each of the `float` vectors' dimensions are quantized to 1-byte integers. This can reduce the memory footprint by as much as 75% at the cost of some accuracy. However, the disk usage can increase by @@ -240,9 +241,10 @@ expense of slower indexing speed. The type of kNN algorithm to use. Can be either any of: + -- -* `hnsw` - The default storage type. This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] for scalable +* `hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] for scalable approximate kNN search. 
This supports all `element_type` values. -* `int8_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically scalar +* `int8_hnsw` - The default index type for float vectors. +This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically scalar quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 4x at the cost of some accuracy. See <>. * `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 030c10a91d005..ffac84c11a779 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -272,6 +272,8 @@ If you want to provide `float` vectors, but want the memory savings of `byte` ve internally they are indexed as `byte` vectors. Additionally, the original `float` vectors are still retained in the index. +NOTE: The default index type for `dense_vector` is `int8_hnsw`. + To use quantization, you can use the index type `int8_hnsw` object in the `dense_vector` mapping. [source,console] @@ -652,7 +654,10 @@ PUT passage_vectors "properties": { "vector": { "type": "dense_vector", - "dims": 2 + "dims": 2, + "index_options": { + "type": "hnsw" + } }, "text": { "type": "text", diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml index 4aca71fe48f4a..96b950e5ae927 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml @@ -18,6 +18,10 @@ dims: 3 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 100 - do: bulk: index: test-float-index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index 6c6c75990b0f5..32558dbe5a8c0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -23,6 +23,10 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml index b1c0fd948481b..eb70e5b7bcf64 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -19,6 +19,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 my_name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml index 8f52a72cce01e..0ea24686ff839 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -19,6 +19,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 my_name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 9ff6319a01af4..4a884b644c6a7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -15,6 +15,10 @@ setup: dims: 3 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 category: type: keyword nested: @@ -27,6 +31,10 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 57f8603f1e06e..c8cbf499cf8b2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -15,11 +15,19 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 another_vector: type: dense_vector dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: @@ -371,6 +379,10 @@ setup: dims: 5 index: true similarity: max_inner_product + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml index 8faad25f0037d..b1933ebde297d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -13,26 +13,46 @@ setup: dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 normalized_vector: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 end_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 first_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 middle_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml index 92065857bd571..0672e27b43c67 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml @@ -17,6 +17,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 7f67d53f31384..784edfdac3469 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -21,16 +21,10 @@ setup: indices.get_mapping: index: test - - match: - test: - mappings: - properties: - vector: - type: dense_vector - dims: 5 - index: true - similarity: cosine - + - match: { test.mappings.properties.vector.type: dense_vector } + - match: { test.mappings.properties.vector.dims: 5 } + - match: { test.mappings.properties.vector.index: true } + - match: { test.mappings.properties.vector.similarity: cosine } --- "Indexed by default with specified similarity and index options": - do: @@ -127,7 +121,29 @@ setup: type: hnsw m: 32 ef_construction: 200 +--- +"Default index options for dense_vector": + - skip: + version: ' - 8.13.99' + reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' + - do: + indices.create: + index: test_default_index_options + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + - match: { acknowledged: true } + - do: + indices.get_mapping: + index: test_default_index_options - + - match: { test_default_index_options.mappings.properties.vector.type: dense_vector } + - match: { test_default_index_options.mappings.properties.vector.dims: 5 } + - match: { test_default_index_options.mappings.properties.vector.index: true } + - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } + - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index bca7b963becaa..6edd43683519e 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -103,6 +103,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_00_1, Version.LUCENE_9_9_2); public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); + public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 73e29a98c1531..db958dc8a8acb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -93,6 +93,7 @@ import static org.elasticsearch.common.Strings.format; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.index.IndexVersions.DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; /** * A {@link FieldMapper} for indexing a dense vector of floats. @@ -108,6 +109,7 @@ static boolean isNotUnitVector(float magnitude) { public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; + public static final IndexVersion DEFAULT_TO_INT8 = DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; @@ -152,15 +154,7 @@ public static class Builder extends FieldMapper.Builder { }, m -> toType(m).fieldType().dims, XContentBuilder::field, Object::toString).setSerializerCheck((id, ic, v) -> v != null) .setMergeValidator((previous, current, c) -> previous == null || Objects.equals(previous, current)); private final Parameter similarity; - private final Parameter indexOptions = new Parameter<>( - "index_options", - false, - () -> null, - (n, c, o) -> o == null ? null : parseIndexOptions(n, o), - m -> toType(m).indexOptions, - XContentBuilder::field, - Objects::toString - ).setSerializerCheck((id, ic, v) -> v != null); + private final Parameter indexOptions; private final Parameter indexed; private final Parameter> meta = Parameter.metaParam(); @@ -170,6 +164,7 @@ public Builder(String name, IndexVersion indexVersionCreated) { super(name); this.indexVersionCreated = indexVersionCreated; final boolean indexedByDefault = indexVersionCreated.onOrAfter(INDEXED_BY_DEFAULT_INDEX_VERSION); + final boolean defaultInt8Hnsw = indexVersionCreated.onOrAfter(DEFAULT_DENSE_VECTOR_TO_INT8_HNSW); this.indexed = Parameter.indexParam(m -> toType(m).fieldType().indexed, indexedByDefault); if (indexedByDefault) { // Only serialize on newer index versions to prevent breaking existing indices when upgrading @@ -182,6 +177,34 @@ public Builder(String name, IndexVersion indexVersionCreated) { (Supplier) () -> indexedByDefault && indexed.getValue() ? VectorSimilarity.COSINE : null, VectorSimilarity.class ).acceptsNull().setSerializerCheck((id, ic, v) -> v != null); + this.indexOptions = new Parameter<>( + "index_options", + false, + () -> defaultInt8Hnsw && elementType.getValue() != ElementType.BYTE && this.indexed.getValue() + ? new Int8HnswIndexOptions( + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + null + ) + : null, + (n, c, o) -> o == null ? 
null : parseIndexOptions(n, o), + m -> toType(m).indexOptions, + (b, n, v) -> { + if (v != null) { + b.field(n, v); + } + }, + Objects::toString + ).setSerializerCheck((id, ic, v) -> v != null).addValidator(v -> { + if (v != null && v.supportsElementType(elementType.getValue()) == false) { + throw new IllegalArgumentException( + "[element_type] cannot be [" + elementType.getValue().toString() + "] when using index type [" + v.type + "]" + ); + } + }).acceptsNull(); + if (defaultInt8Hnsw) { + this.indexOptions.alwaysSerialize(); + } this.indexed.addValidator(v -> { if (v) { if (similarity.getValue() == null) { @@ -200,13 +223,6 @@ public Builder(String name, IndexVersion indexVersionCreated) { } } }); - this.indexOptions.addValidator(v -> { - if (v != null && v.supportsElementType(elementType.getValue()) == false) { - throw new IllegalArgumentException( - "[element_type] cannot be [" + elementType.getValue().toString() + "] when using index type [" + v.type + "]" - ); - } - }); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index c417ec995a20a..2d1b1cc9545db 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -247,7 +247,15 @@ public void testMergeDims() throws IOException { mapping = mapping(b -> { b.startObject("field"); - b.field("type", "dense_vector").field("dims", 4).field("similarity", "cosine").field("index", true); + b.field("type", "dense_vector") + .field("dims", 4) + .field("similarity", "cosine") + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .field("m", 16) + .field("ef_construction", 100) + .endObject(); b.endObject(); }); merge(mapperService, mapping); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 3a82f697acc9d..7269d9c3e5e7f 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -53,6 +53,9 @@ public void setupIndices() throws Exception { .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("text") .field("type", "text") @@ -80,12 +83,18 @@ public void setupIndices() throws Exception { .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("vector_desc") .field("type", "dense_vector") .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("int") .field("type", "integer") diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index 56cb8dd94de0d..e55a1897eb701 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml 
+++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -21,6 +21,10 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index 11d048b9d11be..de5b29b21da72 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -18,11 +18,19 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 vector_desc: type: dense_vector dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 int: type: integer text: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml index 2c7c5e5a50697..1387c37349cd4 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml @@ -21,6 +21,10 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml index 7ac41de12c5e7..2c2b59f306ee3 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml @@ -20,11 +20,19 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 vector_desc: type: dense_vector dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 int: type: integer text: From f4613d0248368f4a8e28ad52b1b95156c4499a6d Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 1 Apr 2024 17:33:03 +0300 Subject: [PATCH 027/264] ESQL: perform a reduction on the data node (#106516) * Introduce node-level reduction (instead of the coordinator level one) behind a pragma --- docs/changelog/106516.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/EsqlSecurityIT.java | 3 + .../src/main/resources/ip.csv-spec | 6 +- .../action/AbstractEsqlIntegTestCase.java | 3 + .../xpack/esql/action/EsqlActionTaskIT.java | 100 +++++++++++++++--- .../xpack/esql/io/stream/PlanNamedTypes.java | 6 +- .../xpack/esql/io/stream/PlanStreamInput.java | 4 + .../esql/io/stream/PlanStreamOutput.java | 9 ++ .../esql/optimizer/PhysicalPlanOptimizer.java | 3 +- .../esql/plan/physical/FragmentExec.java | 31 ++++-- .../esql/planner/LocalExecutionPlanner.java | 10 +- .../xpack/esql/planner/Mapper.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 38 +++++++ .../xpack/esql/plugin/ComputeService.java | 27 ++++- .../xpack/esql/plugin/QueryPragmas.java | 10 ++ .../xpack/esql/session/EsqlSession.java | 2 +- .../xpack/esql/planner/FilterTests.java | 2 +- 18 files changed, 224 insertions(+), 38 
deletions(-) create mode 100644 docs/changelog/106516.yaml diff --git a/docs/changelog/106516.yaml b/docs/changelog/106516.yaml new file mode 100644 index 0000000000000..905896fb0ef03 --- /dev/null +++ b/docs/changelog/106516.yaml @@ -0,0 +1,5 @@ +pr: 106516 +summary: "ESQL: perform a reduction on the data node" +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 76f4d6c1c0fae..d7bc07b3eb2b4 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -158,6 +158,7 @@ static TransportVersion def(int id) { public static final TransportVersion SEARCH_NODE_LOAD_AUTOSCALING = def(8_617_00_0); public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); + public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 2dd64cf02446b..7a9b90baa0d35 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -389,6 +389,9 @@ static Settings randomPragmas() { if (randomBoolean()) { settings.put("enrich_max_workers", between(1, 5)); } + if (randomBoolean()) { + settings.put("node_level_reduction", randomBoolean()); + } return settings.build(); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 09b17ed4112c9..58c1cf3dc9174 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -62,9 +62,9 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece lessThan required_feature: esql.mv_warn -from hosts | sort host, card | where ip0 < ip1 | keep card, host, ip0, ip1; -warning:Line 1:38: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value +from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; +warning:Line 1:43: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:43: java.lang.IllegalArgumentException: single-value function encountered multi-value
 
 card:keyword |host:keyword |ip0:ip |ip1:ip
 eth1 |beta |127.0.0.1 |127.0.0.2
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java
index 5ba9c622d85da..a9238d202e5b5 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java
@@ -188,6 +188,9 @@ protected static QueryPragmas randomPragmas() {
             if (randomBoolean()) {
                 settings.put("max_concurrent_shards_per_node", randomIntBetween(1, 10));
             }
+            if (randomBoolean()) {
+                settings.put("node_level_reduction", randomBoolean());
+            }
         }
         return new QueryPragmas(settings.build());
     }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index 92987db865ac7..672a1f6cc8c71 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -76,6 +76,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
     private String READ_DESCRIPTION;
     private String MERGE_DESCRIPTION;
     private String REDUCE_DESCRIPTION;
+    private boolean nodeLevelReduction;
 
     @Before
     public void setup() {
@@ -94,6 +95,7 @@ public void setup() {
         REDUCE_DESCRIPTION = """
             \\_ExchangeSourceOperator[]
             \\_ExchangeSinkOperator""";
+        nodeLevelReduction = randomBoolean();
     }
 
     public void testTaskContents() throws Exception {
@@ -209,22 +211,31 @@ public void testCancelEsqlTask() throws Exception {
     }
 
     private ActionFuture<EsqlQueryResponse> startEsql() {
+        return startEsql("from test | stats sum(pause_me)");
+    }
+
+    private ActionFuture<EsqlQueryResponse> startEsql(String query) {
         scriptPermits.drainPermits();
         scriptPermits.release(between(1, 5));
-        var pragmas = new QueryPragmas(
-            Settings.builder()
-                // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too.
-                .put("data_partitioning", "shard")
-                // Limit the page size to something small so we do more than one page worth of work, so we get more status updates.
-                .put("page_size", pageSize())
-                // Report the status after every action
-                .put("status_interval", "0ms")
-                .build()
-        );
-        return EsqlQueryRequestBuilder.newSyncEsqlQueryRequestBuilder(client())
-            .query("from test | stats sum(pause_me)")
-            .pragmas(pragmas)
-            .execute();
+        var settingsBuilder = Settings.builder()
+            // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too.
+            .put("data_partitioning", "shard")
+            // Limit the page size to something small so we do more than one page worth of work, so we get more status updates.
+ .put("page_size", pageSize()) + // Report the status after every action + .put("status_interval", "0ms"); + + if (nodeLevelReduction == false) { + // explicitly set the default (false) or don't + if (randomBoolean()) { + settingsBuilder.put("node_level_reduction", nodeLevelReduction); + } + } else { + settingsBuilder.put("node_level_reduction", nodeLevelReduction); + } + + var pragmas = new QueryPragmas(settingsBuilder.build()); + return EsqlQueryRequestBuilder.newSyncEsqlQueryRequestBuilder(client()).query(query).pragmas(pragmas).execute(); } private void cancelTask(TaskId taskId) { @@ -407,6 +418,67 @@ protected void doRun() throws Exception { } } + public void testTaskContentsForTopNQuery() throws Exception { + READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " + + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" + + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" + + "\\_ProjectOperator[projection = [1]]\n" + + "\\_ExchangeSinkOperator").replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + + "\\_ProjectOperator[projection = [0]]\n" + + "\\_OutputOperator[columns = [pause_me]]"; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction + ? "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + // each scripted field "emit" is called by LuceneTopNSourceOperator and by ValuesSourceReaderOperator + scriptPermits.release(2 * numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo(1L)); + } + } + } + + public void testTaskContentsForLimitQuery() throws Exception { + String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit()] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_LimitOperator[limit = limit()] + \\_ProjectOperator[projection = [0]] + \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit); + REDUCE_DESCRIPTION = ("\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_LimitOperator[limit = limit()]\n" : "") + + "\\_ExchangeSinkOperator").replace("limit()", limit); + + ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo(1L)); + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 44e134a0d7aec..536265b1be3e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -629,7 +629,8 @@ static FragmentExec readFragmentExec(PlanStreamInput in) throws IOException { in.readSource(), in.readLogicalPlanNode(), in.readOptionalNamedWriteable(QueryBuilder.class), - in.readOptionalVInt() + in.readOptionalVInt(), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_REDUCER_NODE_FRAGMENT) ? in.readOptionalPhysicalPlanNode() : null ); } @@ -638,6 +639,9 @@ static void writeFragmentExec(PlanStreamOutput out, FragmentExec fragmentExec) t out.writeLogicalPlanNode(fragmentExec.fragment()); out.writeOptionalNamedWriteable(fragmentExec.esFilter()); out.writeOptionalVInt(fragmentExec.estimatedRowSize()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_REDUCER_NODE_FRAGMENT)) { + out.writeOptionalPhysicalPlanNode(fragmentExec.reducer()); + } } static GrokExec readGrokExec(PlanStreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index bdd93d733a460..046e46d216bdc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -104,6 +104,10 @@ public PhysicalPlan readPhysicalPlanNode() throws IOException { return readNamed(PhysicalPlan.class); } + public PhysicalPlan readOptionalPhysicalPlanNode() throws IOException { + return readOptionalNamed(PhysicalPlan.class); + } + public Source readSource() throws IOException { boolean hasSource = readBoolean(); return hasSource ? 
readSourceWithText(this, configuration.query()) : Source.EMPTY; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index ac894ce7a099e..5ee292b6add9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -53,6 +53,15 @@ public void writePhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException writeNamed(PhysicalPlan.class, physicalPlan); } + public void writeOptionalPhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException { + if (physicalPlan == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writePhysicalPlanNode(physicalPlan); + } + } + public void writeSource(Source source) throws IOException { writeBoolean(true); writeSourceNoText(this, source); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java index 42e54f002477a..ee095a24e20fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java @@ -151,7 +151,8 @@ public PhysicalPlan apply(PhysicalPlan plan) { Source.EMPTY, new Project(logicalFragment.source(), logicalFragment, output), fragmentExec.esFilter(), - fragmentExec.estimatedRowSize() + fragmentExec.estimatedRowSize(), + fragmentExec.reducer() ) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java index 43fccf4cf62da..e23a8c783e1e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java @@ -20,6 +20,7 @@ public class FragmentExec extends LeafExec implements EstimatesRowSize { private final LogicalPlan fragment; private final QueryBuilder esFilter; + private final PhysicalPlan reducer; // datanode-level physical plan node that performs an intermediate (not partial) reduce /** * Estimate of the number of bytes that'll be loaded per position before @@ -28,14 +29,15 @@ public class FragmentExec extends LeafExec implements EstimatesRowSize { private final int estimatedRowSize; public FragmentExec(LogicalPlan fragment) { - this(fragment.source(), fragment, null, 0); + this(fragment.source(), fragment, null, 0, null); } - public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize) { + public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize, PhysicalPlan reducer) { super(source); this.fragment = fragment; this.esFilter = esFilter; this.estimatedRowSize = estimatedRowSize; + this.reducer = reducer; } public LogicalPlan fragment() { @@ -50,9 +52,13 @@ public Integer estimatedRowSize() { return estimatedRowSize; } + public PhysicalPlan reducer() { + return reducer; + } + @Override protected NodeInfo info() { - return NodeInfo.create(this, FragmentExec::new, fragment, esFilter, estimatedRowSize); + return NodeInfo.create(this, FragmentExec::new, fragment, 
esFilter, estimatedRowSize, reducer); } @Override @@ -65,12 +71,20 @@ public PhysicalPlan estimateRowSize(State state) { int estimatedRowSize = state.consumeAllFields(false); return Objects.equals(estimatedRowSize, this.estimatedRowSize) ? this - : new FragmentExec(source(), fragment, esFilter, estimatedRowSize); + : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); + } + + public FragmentExec withFilter(QueryBuilder filter) { + return Objects.equals(filter, this.esFilter) ? this : new FragmentExec(source(), fragment, filter, estimatedRowSize, reducer); + } + + public FragmentExec withReducer(PhysicalPlan reducer) { + return Objects.equals(reducer, this.reducer) ? this : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); } @Override public int hashCode() { - return Objects.hash(fragment, esFilter, estimatedRowSize); + return Objects.hash(fragment, esFilter, estimatedRowSize, reducer); } @Override @@ -86,7 +100,8 @@ public boolean equals(Object obj) { FragmentExec other = (FragmentExec) obj; return Objects.equals(fragment, other.fragment) && Objects.equals(esFilter, other.esFilter) - && Objects.equals(estimatedRowSize, other.estimatedRowSize); + && Objects.equals(estimatedRowSize, other.estimatedRowSize) + && Objects.equals(reducer, other.reducer); } @Override @@ -97,7 +112,9 @@ public String nodeString() { sb.append(esFilter); sb.append(", estimatedRowSize="); sb.append(estimatedRowSize); - sb.append(", fragment=[<>\n"); + sb.append(", reducer=["); + sb.append(reducer == null ? "" : reducer.toString()); + sb.append("], fragment=[<>\n"); sb.append(fragment.toString()); sb.append("<>]]"); return sb.toString(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index aad80b6c673ba..3ea3bd54da135 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -149,7 +149,7 @@ public LocalExecutionPlanner( /** * turn the given plan into a list of drivers to execute */ - public LocalExecutionPlan plan(PhysicalPlan node) { + public LocalExecutionPlan plan(PhysicalPlan localPhysicalPlan) { var context = new LocalExecutionPlannerContext( new ArrayList<>(), new Holder<>(DriverParallelism.SINGLE), @@ -160,11 +160,11 @@ public LocalExecutionPlan plan(PhysicalPlan node) { ); // workaround for https://github.com/elastic/elasticsearch/issues/99782 - node = node.transformUp( + localPhysicalPlan = localPhysicalPlan.transformUp( AggregateExec.class, a -> a.getMode() == AggregateExec.Mode.FINAL ? 
new ProjectExec(a.source(), a, Expressions.asAttributes(a.aggregates())) : a ); - PhysicalOperation physicalOperation = plan(node, context); + PhysicalOperation physicalOperation = plan(localPhysicalPlan, context); final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( @@ -181,7 +181,7 @@ private PhysicalOperation plan(PhysicalPlan node, LocalExecutionPlannerContext c if (node instanceof AggregateExec aggregate) { return planAggregation(aggregate, context); } else if (node instanceof FieldExtractExec fieldExtractExec) { - return planFieldExtractNode(context, fieldExtractExec); + return planFieldExtractNode(fieldExtractExec, context); } else if (node instanceof ExchangeExec exchangeExec) { return planExchange(exchangeExec, context); } else if (node instanceof TopNExec topNExec) { @@ -259,7 +259,7 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio return PhysicalOperation.fromSource(luceneFactory, layout.build()); } - private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext context, FieldExtractExec fieldExtractExec) { + private PhysicalOperation planFieldExtractNode(FieldExtractExec fieldExtractExec, LocalExecutionPlannerContext context) { return physicalOperationProviders.fieldExtractPhysicalOperation(fieldExtractExec, plan(fieldExtractExec.child(), context)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 516c88b5f6526..2205947dccdeb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -116,7 +116,7 @@ public PhysicalPlan map(LogicalPlan p) { throw new EsqlIllegalArgumentException("unsupported logical plan node [" + p.nodeName() + "]"); } - private static boolean isPipelineBreaker(LogicalPlan p) { + static boolean isPipelineBreaker(LogicalPlan p) { return p instanceof Aggregate || p instanceof TopN || p instanceof Limit || p instanceof OrderBy; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index f8fd284bbd558..1e64a6f136310 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -28,7 +29,10 @@ import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import 
org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -37,8 +41,13 @@ import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.options.EsSourceOptions; +import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.Limit; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.plan.logical.OrderBy; +import org.elasticsearch.xpack.ql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -73,6 +82,35 @@ public static Tuple breakPlanBetweenCoordinatorAndDa return new Tuple<>(coordinatorPlan, dataNodePlan.get()); } + public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan unused) { + var pipelineBreakers = plan.collectFirstChildren(Mapper::isPipelineBreaker); + + if (pipelineBreakers.isEmpty() == false) { + UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); + if (pipelineBreaker instanceof TopN topN) { + return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + } else if (pipelineBreaker instanceof Limit limit) { + return new LimitExec(limit.source(), unused, limit.limit()); + } else if (pipelineBreaker instanceof OrderBy order) { + return new OrderExec(order.source(), unused, order.order()); + } else if (pipelineBreaker instanceof Aggregate aggregate) { + // TODO handle this as a special PARTIAL step (intermediate) + /*return new AggregateExec( + aggregate.source(), + unused, + aggregate.groupings(), + aggregate.aggregates(), + AggregateExec.Mode.PARTIAL, + 0 + );*/ + return null; + } else { + throw new EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); + } + } + return null; + } + /** * Returns a set of concrete indices after resolving the original indices specified in the FROM command. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 8fae3c09f32d5..959783d2f5235 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -64,6 +64,7 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; @@ -289,12 +290,19 @@ private void startComputeOnDataNodes( ActionListener parentListener, Supplier> dataNodeListenerSupplier ) { + var planWithReducer = configuration.pragmas().nodeLevelReduction() == false + ? 
dataNodePlan + : dataNodePlan.transformUp(FragmentExec.class, f -> { + PhysicalPlan reductionNode = PlannerUtils.dataNodeReductionPlan(f.fragment(), dataNodePlan); + return reductionNode == null ? f : f.withReducer(reductionNode); + }); + // The lambda is to say if a TEXT field has an identical exact subfield // We cannot use SearchContext because we don't have it yet. // Since it's used only for @timestamp, it is relatively safe to assume it's not needed // but it would be better to have a proper impl. - QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan, x -> true); - EsSourceOptions esSourceOptions = PlannerUtils.esSourceOptions(dataNodePlan); + QueryBuilder requestFilter = PlannerUtils.requestFilter(planWithReducer, x -> true); + EsSourceOptions esSourceOptions = PlannerUtils.esSourceOptions(planWithReducer); lookupDataNodes( parentTask, clusterAlias, @@ -327,7 +335,7 @@ private void startComputeOnDataNodes( clusterAlias, node.shardIds, node.aliasFilters, - dataNodePlan + planWithReducer ), parentTask, TransportRequestOptions.EMPTY, @@ -426,6 +434,9 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, LOGGER.debug("Received physical plan:\n{}", plan); plan = PlannerUtils.localPlan(context.searchContexts, context.configuration, plan); + // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) + // it's doing this in the planning of EsQueryExec (the source of the data) + // see also EsPhysicalOperationProviders.sourcePhysicalOperation LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(plan); if (LOGGER.isDebugEnabled()) { @@ -750,11 +761,19 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T final ActionListener listener = new ChannelActionListener<>(channel); final ExchangeSinkExec reducePlan; if (request.plan() instanceof ExchangeSinkExec plan) { + var fragments = plan.collectFirstChildren(FragmentExec.class::isInstance); + if (fragments.isEmpty()) { + listener.onFailure(new IllegalStateException("expected a fragment plan for a remote compute; got " + request.plan())); + return; + } + + var localExchangeSource = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); + FragmentExec fragment = (FragmentExec) fragments.get(0); reducePlan = new ExchangeSinkExec( plan.source(), plan.output(), plan.isIntermediateAgg(), - new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()) + fragment.reducer() != null ? 
fragment.reducer().replaceChildren(List.of(localExchangeSource)) : localExchangeSource ); } else { listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index fd76edf46229e..f24619ff80d9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -57,6 +57,8 @@ public final class QueryPragmas implements Writeable { public static final Setting MAX_CONCURRENT_SHARDS_PER_NODE = Setting.intSetting("max_concurrent_shards_per_node", 10, 1, 100); + public static final Setting NODE_LEVEL_REDUCTION = Setting.boolSetting("node_level_reduction", false); + public static final QueryPragmas EMPTY = new QueryPragmas(Settings.EMPTY); private final Settings settings; @@ -126,6 +128,14 @@ public int maxConcurrentShardsPerNode() { return MAX_CONCURRENT_SHARDS_PER_NODE.get(settings); } + /** + * Returns true if each data node should perform a local reduction for sort, limit, topN, stats or false if the coordinator node + * will perform the reduction. + */ + public boolean nodeLevelReduction() { + return NODE_LEVEL_REDUCTION.get(settings); + } + public boolean isEmpty() { return settings.isEmpty(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index bbf16fc946999..cc0010c788a0c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -133,7 +133,7 @@ public void execute(EsqlQueryRequest request, ActionListener liste // TODO: filter integration testing filter = fragmentFilter != null ? 
boolQuery().filter(fragmentFilter).must(filter) : filter; LOGGER.debug("Fold filter {} to EsQueryExec", filter); - f = new FragmentExec(f.source(), f.fragment(), filter, f.estimatedRowSize()); + f = f.withFilter(filter); } return f; }))) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index eef109cb2830e..aedb379338171 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -293,7 +293,7 @@ private PhysicalPlan plan(String query, QueryBuilder restFilter) { // System.out.println("physical\n" + physical); physical = physical.transformUp( FragmentExec.class, - f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize()) + f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize(), f.reducer()) ); physical = physicalPlanOptimizer.optimize(physical); // System.out.println("optimized\n" + physical); From f078c30be7d35e8906cadb938e07c2802dfaa828 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 08:17:26 -0700 Subject: [PATCH 028/264] AwaitsFix #106957 --- .../java/org/elasticsearch/kibana/KibanaThreadPoolTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java index 0974fd6d36b18..5fe5bbf95ba56 100644 --- a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java +++ b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java @@ -29,6 +29,7 @@ protected Collection> nodePlugins() { return Set.of(KibanaPlugin.class); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testKibanaThreadPool() { runWithBlockedThreadPools(() -> { // index documents From af8de04156f7d767cda17fbeaa7e14358f817d52 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 08:23:27 -0700 Subject: [PATCH 029/264] Remove redundant testing of Java 22 (#106943) --- .buildkite/pipelines/periodic.template.yml | 2 -- .buildkite/pipelines/periodic.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 8e92fffbc6f88..05d516992a7f6 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -85,7 +85,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 GRADLE_TASK: - checkPart1 - checkPart2 @@ -108,7 +107,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 BWC_VERSION: $BWC_LIST agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 0fa88bb08f468..435e709bbf072 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -416,7 +416,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 GRADLE_TASK: - checkPart1 - checkPart2 @@ -439,7 +438,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp From 25d3f70f3d912d52eec310f3dee0d1ad3751dcb5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 1 Apr 2024 11:37:01 -0400 Subject: [PATCH 030/264] ESQL: More tests for DATE_TRUNC (#106908) This adds integration tests 
for `DATE_TRUNC` that round to `1 HOUR` and `1 MINUTE` - that's a thing folks will do and I didn't see it in the integration tests. We do it in unit tests but I just want to be extra paranoid. --- .../src/main/resources/date.csv-spec | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index de7a48bcf6834..96a025ce5dc9c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -159,6 +159,39 @@ x:date |hire_date:date 1995-01-01T00:00:00.000Z|1995-01-27T00:00:00.000Z ; +dateTruncHour + FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC(1 HOUR, @timestamp) +| KEEP t; + +t:date +2023-10-23T12:00:00 +2023-10-23T12:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +; + +dateTruncMinute + FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC(1 MINUTE, @timestamp) +| KEEP t; + +t:date +2023-10-23T12:15:00 +2023-10-23T12:27:00 +2023-10-23T13:33:00 +2023-10-23T13:51:00 +2023-10-23T13:52:00 +2023-10-23T13:53:00 +2023-10-23T13:55:00 +; + + convertFromDatetime from employees | sort emp_no | keep birth_date | eval bd = to_datetime(birth_date) | limit 2; From af9390074cc8d0d6f7c6dfc38eb6d584cae11515 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 1 Apr 2024 17:53:51 +0200 Subject: [PATCH 031/264] Remove Singleton annotation and downstream code from o.e.common.inject (#106936) We don't use the singleton annotation anywhere so it and a bunch of the scoping code can just go away. Also, we don't use the stage functionality (it's always develop) so that can go away as well. 
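To illustrate, a minimal sketch of what explicit singleton scoping looks like once the
annotation is gone: the scope is declared at the binding site through the binder EDSL,
which is the mechanism this change keeps (asEagerSingleton() on the binding builder).
ServiceModule, Service and ServiceImpl below are hypothetical names for illustration
only, not code from this change:

    import org.elasticsearch.common.inject.AbstractModule;

    // Hypothetical module; Service and ServiceImpl stand in for real types.
    public class ServiceModule extends AbstractModule {
        @Override
        protected void configure() {
            // The scope is stated here at the binding site instead of via a
            // @Singleton annotation on ServiceImpl; the instance is created
            // eagerly while the Injector is built
            // (see InjectorBuilder#loadEagerSingletons).
            bind(Service.class).to(ServiceImpl.class).asEagerSingleton();
        }
    }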
--- .../common/inject/AbstractProcessor.java | 6 -- .../elasticsearch/common/inject/Binder.java | 15 +-- .../common/inject/BindingProcessor.java | 9 +- .../common/inject/InheritingState.java | 14 --- .../elasticsearch/common/inject/Injector.java | 1 - .../common/inject/InjectorBuilder.java | 18 ++-- .../common/inject/InjectorImpl.java | 9 -- .../common/inject/InjectorShell.java | 19 +--- .../common/inject/ScopeAnnotation.java | 41 --------- .../common/inject/ScopeBindingProcessor.java | 62 ------------- .../elasticsearch/common/inject/Scopes.java | 26 +----- .../common/inject/Singleton.java | 35 ------- .../elasticsearch/common/inject/Stage.java | 44 --------- .../elasticsearch/common/inject/State.java | 18 ---- .../internal/AbstractBindingBuilder.java | 2 +- .../common/inject/internal/Annotations.java | 47 ---------- .../common/inject/internal/BindingImpl.java | 2 +- .../common/inject/internal/Errors.java | 30 ------ .../inject/internal/InstanceBindingImpl.java | 4 +- .../inject/internal/LinkedBindingImpl.java | 4 +- .../internal/LinkedProviderBindingImpl.java | 4 +- .../internal/ProviderInstanceBindingImpl.java | 4 +- .../common/inject/internal/Scoping.java | 91 +------------------ .../internal/UntargettedBindingImpl.java | 4 +- .../common/inject/multibindings/Element.java | 2 - .../inject/multibindings/MapBinder.java | 2 +- .../inject/multibindings/Multibinder.java | 10 +- .../inject/multibindings/RealElement.java | 15 +-- .../common/inject/spi/ElementVisitor.java | 5 - .../common/inject/spi/Elements.java | 19 +--- .../common/inject/spi/ScopeBinding.java | 64 ------------- 31 files changed, 41 insertions(+), 585 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/Singleton.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/Stage.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java diff --git a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index d23a3c3c10995..3eba654d412bd 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.inject.spi.ElementVisitor; import org.elasticsearch.common.inject.spi.Message; import org.elasticsearch.common.inject.spi.ProviderLookup; -import org.elasticsearch.common.inject.spi.ScopeBinding; import java.util.Iterator; import java.util.List; @@ -73,11 +72,6 @@ public Boolean visit(Message message) { return false; } - @Override - public Boolean visit(ScopeBinding scopeBinding) { - return false; - } - @Override public Boolean visit(Binding binding) { return false; diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java index 97aa924d32cb1..07a8979eb18a6 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java @@ -20,8 +20,6 @@ import org.elasticsearch.common.inject.binder.LinkedBindingBuilder; import org.elasticsearch.common.inject.spi.Message; -import java.lang.annotation.Annotation; - /** * Collects configuration 
information (primarily bindings) which will be
 * used to create an {@link Injector}. Guice provides this object to your
@@ -96,11 +94,6 @@
 * Note: a scope specified in this way overrides any scope that
 * was specified with an annotation on the {@code ServiceImpl} class.
 *
- * Besides {@link Singleton}/{@link Scopes#SINGLETON}, there are
- * servlet-specific scopes available in
- * {@code com.google.inject.servlet.ServletScopes}, and your Modules can
- * contribute their own custom scopes for use here as well.
- *
 *
 *     bind(new TypeLiteral<PaymentService<CreditCard>>() {})
 *         .to(CreditCardPaymentService.class);
@@ -165,8 +158,7 @@
 * cases Guice will let something bogus slip by, and will then inform you of
 * the problems at runtime, as soon as you try to create your Injector.
 *
- * The other methods of Binder such as {@link #bindScope},
- * {@link #install}, and {@link #addError} are not part of the Binding EDSL;
+ *
The other methods of Binder such as {@link #install}, and {@link #addError} are not part of the Binding EDSL; * you can learn how to use these in the usual way, from the method * documentation. * @@ -176,11 +168,6 @@ */ public interface Binder { - /** - * Binds a scope to an annotation. - */ - void bindScope(Class annotationType, Scope scope); - /** * See the EDSL examples at {@link Binder}. */ diff --git a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java index 0865bf47090af..dfa4fcb16bc62 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java @@ -16,7 +16,6 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.Annotations; import org.elasticsearch.common.inject.internal.BindingImpl; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; @@ -73,9 +72,7 @@ public Boolean visit(Binding command) { return true; } - validateKey(command.getSource(), command.getKey()); - - final Scoping scoping = Scopes.makeInjectable(((BindingImpl) command).getScoping(), injector, errors); + final Scoping scoping = ((BindingImpl) command).getScoping(); command.acceptTargetVisitor(new BindingTargetVisitor() { @@ -173,10 +170,6 @@ public Void visit() { return true; } - private void validateKey(Object source, Key key) { - Annotations.checkForMisplacedScopeAnnotations(key.getRawType(), source, errors); - } - static UntargettedBindingImpl invalidBinding(InjectorImpl injector, Key key, Object source) { return new UntargettedBindingImpl<>(injector, key, source); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java b/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java index 95788343f6f57..bf6dbabafb034 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java @@ -23,10 +23,8 @@ import org.elasticsearch.common.inject.internal.MatcherAndConverter; import org.elasticsearch.common.inject.internal.SourceProvider; -import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -41,7 +39,6 @@ class InheritingState implements State { // Must be a linked hashmap in order to preserve order of bindings in Modules. private final Map, Binding> explicitBindingsMutable = new LinkedHashMap<>(); private final Map, Binding> explicitBindings = Collections.unmodifiableMap(explicitBindingsMutable); - private final Map, Scope> scopes = new HashMap<>(); private final List converters = new ArrayList<>(); private WeakKeySet blacklistedKeys = new WeakKeySet(); private final Object lock; @@ -72,17 +69,6 @@ public void putBinding(Key key, BindingImpl binding) { explicitBindingsMutable.put(key, binding); } - @Override - public Scope getScope(Class annotationType) { - Scope scope = scopes.get(annotationType); - return scope != null ? 
scope : State.NONE.getScope(annotationType); - } - - @Override - public void putAnnotation(Class annotationType, Scope scope) { - scopes.put(annotationType, scope); - } - @Override public Iterable getConvertersThisLevel() { return converters; diff --git a/server/src/main/java/org/elasticsearch/common/inject/Injector.java b/server/src/main/java/org/elasticsearch/common/inject/Injector.java index a4af1300e6414..a3060af8f56d4 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Injector.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Injector.java @@ -29,7 +29,6 @@ *

  • This {@link Injector} instance itself *
  • A {@code Provider} for each binding of type {@code T} *
  • The {@link java.util.logging.Logger} for the class being injected - *
- *  • The {@link Stage} in which the Injector was created

    * Injectors are created using the facade class {@link Guice}. diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java index 737176c77de54..4c5cb95491ebb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java @@ -36,8 +36,7 @@ * No user code is executed in this phase.
  • Dynamic injection. In this phase, we call user code. We inject members that requested
 * injection. This may require user's objects be created and their providers be called. And we
- * create eager singletons. In this phase, user code may have started other threads. This phase
- * is not executed for injectors created using {@link Stage#TOOL the tool stage}
  • + * create eager singletons. In this phase, user code may have started other threads. * * * @author crazybob@google.com (Bob Lee) @@ -136,27 +135,26 @@ private void injectDynamically() { errors.throwCreationExceptionIfErrorsExist(); for (InjectorShell shell : shells) { - loadEagerSingletons(shell.getInjector(), Stage.DEVELOPMENT, errors); + loadEagerSingletons(shell.getInjector(), errors); } stopwatch.resetAndLog("Preloading singletons"); errors.throwCreationExceptionIfErrorsExist(); } /** - * Loads eager singletons, or all singletons if we're in Stage.PRODUCTION. Bindings discovered - * while we're binding these singletons are not be eager. + * Loads eager singletons. Bindings discovered while we're binding these singletons are not be eager. */ - public static void loadEagerSingletons(InjectorImpl injector, Stage stage, Errors errors) { + public static void loadEagerSingletons(InjectorImpl injector, Errors errors) { for (final Binding binding : injector.state.getExplicitBindingsThisLevel().values()) { - loadEagerSingletons(injector, stage, errors, (BindingImpl) binding); + loadEagerSingletons(injector, errors, (BindingImpl) binding); } for (final BindingImpl binding : injector.jitBindings.values()) { - loadEagerSingletons(injector, stage, errors, binding); + loadEagerSingletons(injector, errors, binding); } } - private static void loadEagerSingletons(InjectorImpl injector, Stage stage, final Errors errors, BindingImpl binding) { - if (binding.getScoping().isEagerSingleton(stage)) { + private static void loadEagerSingletons(InjectorImpl injector, final Errors errors, BindingImpl binding) { + if (binding.getScoping().isEagerSingleton()) { try { injector.callInContext(new ContextualCallable() { final Dependency dependency = Dependency.get(binding.getKey()); diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java index 8614fd99da088..10385c54860bc 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.inject.spi.ProviderKeyBinding; import org.elasticsearch.common.inject.util.Providers; -import java.lang.annotation.Annotation; import java.lang.reflect.GenericArrayType; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; @@ -46,7 +45,6 @@ import java.util.Map; import static java.util.Collections.emptySet; -import static org.elasticsearch.common.inject.internal.Annotations.findScopeAnnotation; /** * Default {@link Injector} implementation. 
@@ -330,13 +328,6 @@ BindingImpl createUnitializedBinding(Key key, Scoping scoping, Object throw errors.cannotInjectInnerClass(rawType).toException(); } - if (scoping.isExplicitlyScoped() == false) { - Class scopeAnnotation = findScopeAnnotation(errors, rawType); - if (scopeAnnotation != null) { - scoping = Scopes.makeInjectable(Scoping.forAnnotation(scopeAnnotation), this, errors.withSource(rawType)); - } - } - return ConstructorBindingImpl.create(this, key, source, scoping); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java index cb3b4d63ed3e2..e6df9e2641cd8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java @@ -30,11 +30,9 @@ import java.util.ArrayList; import java.util.List; -import java.util.Objects; import java.util.logging.Logger; import static java.util.Collections.emptySet; -import static org.elasticsearch.common.inject.Scopes.SINGLETON; /** * A partially-initialized injector. See {@link InjectorBuilder}, which uses this to build a tree @@ -69,8 +67,6 @@ static class Builder { */ private State state; - private final Stage stage = Stage.DEVELOPMENT; - void addModules(Iterable modules) { for (Module module : modules) { this.modules.add(module); @@ -95,10 +91,10 @@ List build(BindingProcessor bindingProcessor, Stopwatch stopwatch InjectorImpl injector = new InjectorImpl(state); // bind Stage and Singleton if this is a top-level injector - modules.add(0, new RootModule(stage)); + modules.add(0, new RootModule()); new TypeConverterBindingProcessor(errors).prepareBuiltInConverters(injector); - elements.addAll(Elements.getElements(stage, modules)); + elements.addAll(Elements.getElements(modules)); stopwatch.resetAndLog("Module execution"); new MessageProcessor(errors).process(injector, elements); @@ -106,7 +102,6 @@ List build(BindingProcessor bindingProcessor, Stopwatch stopwatch injector.membersInjectorStore = new MembersInjectorStore(injector); stopwatch.resetAndLog("TypeListeners creation"); - new ScopeBindingProcessor(errors).process(injector, elements); stopwatch.resetAndLog("Scopes creation"); new TypeConverterBindingProcessor(errors).process(injector, elements); @@ -219,17 +214,11 @@ public String toString() { } private static class RootModule implements Module { - final Stage stage; - - private RootModule(Stage stage) { - this.stage = Objects.requireNonNull(stage, "stage"); - } + private RootModule() {} @Override public void configure(Binder binder) { - binder = binder.withSource(SourceProvider.UNKNOWN_SOURCE); - binder.bind(Stage.class).toInstance(stage); - binder.bindScope(Singleton.class, SINGLETON); + binder.withSource(SourceProvider.UNKNOWN_SOURCE); } } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java b/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java deleted file mode 100644 index ea1dd376b29dd..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.ANNOTATION_TYPE; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** - * Annotates annotations which are used for scoping. Only one such annotation - * may apply to a single implementation class. You must also annotate scope - * annotations with {@code @Retention(RUNTIME)}. For example: - *
    - *   {@code @}Retention(RUNTIME)
    - *   {@code @}Target(TYPE)
    - *   {@code @}ScopeAnnotation
    - *   public {@code @}interface SessionScoped {}
    - * 
    - * - * @author crazybob@google.com (Bob Lee) - */ -@Target(ANNOTATION_TYPE) -@Retention(RUNTIME) -public @interface ScopeAnnotation { -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java deleted file mode 100644 index c14acdde94c35..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (C) 2008 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -import org.elasticsearch.common.inject.internal.Annotations; -import org.elasticsearch.common.inject.internal.Errors; -import org.elasticsearch.common.inject.spi.ScopeBinding; - -import java.lang.annotation.Annotation; -import java.util.Objects; - -/** - * Handles {@link Binder#bindScope} commands. - * - * @author crazybob@google.com (Bob Lee) - * @author jessewilson@google.com (Jesse Wilson) - */ -class ScopeBindingProcessor extends AbstractProcessor { - - ScopeBindingProcessor(Errors errors) { - super(errors); - } - - @Override - public Boolean visit(ScopeBinding command) { - Scope scope = command.getScope(); - Class annotationType = command.getAnnotationType(); - - if (Annotations.isScopeAnnotation(annotationType) == false) { - errors.withSource(annotationType).missingScopeAnnotation(); - // Go ahead and bind anyway so we don't get collateral errors. - } - - if (Annotations.isRetainedAtRuntime(annotationType) == false) { - errors.withSource(annotationType).missingRuntimeRetention(command.getSource()); - // Go ahead and bind anyway so we don't get collateral errors. - } - - Scope existing = injector.state.getScope(Objects.requireNonNull(annotationType, "annotation type")); - if (existing != null) { - errors.duplicateScopes(existing, annotationType, scope); - } else { - injector.state.putAnnotation(annotationType, Objects.requireNonNull(scope, "scope")); - } - - return true; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/Scopes.java b/server/src/main/java/org/elasticsearch/common/inject/Scopes.java index ffdf1bf3192ca..60f36fd879aa2 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Scopes.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Scopes.java @@ -16,11 +16,9 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.InternalFactory; import org.elasticsearch.common.inject.internal.Scoping; -import java.lang.annotation.Annotation; import java.util.Locale; /** @@ -33,7 +31,7 @@ public class Scopes { private Scopes() {} /** - * One instance per {@link Injector}. Also see {@code @}{@link Singleton}. + * One instance per {@link Injector}. */ public static final Scope SINGLETON = new Scope() { @Override @@ -82,8 +80,7 @@ public String toString() { * binding arrives it will need to obtain the instance over again. *

    * This exists only in case a class has been annotated with a scope - * annotation such as {@link Singleton @Singleton}, and you need to override - * this to "no scope" in your binding. + * annotation and you need to override this to "no scope" in your binding. * * @since 2.0 */ @@ -115,23 +112,4 @@ static InternalFactory scope(InjectorImpl injector, InternalFac return new InternalFactoryToProviderAdapter<>(Initializables.of(scoped)); } - /** - * Replaces annotation scopes with instance scopes using the Injector's annotation-to-instance - * map. If the scope annotation has no corresponding instance, an error will be added and unscoped - * will be retuned. - */ - static Scoping makeInjectable(Scoping scoping, InjectorImpl injector, Errors errors) { - Class scopeAnnotation = scoping.getScopeAnnotation(); - if (scopeAnnotation == null) { - return scoping; - } - - Scope scope = injector.state.getScope(scopeAnnotation); - if (scope != null) { - return Scoping.forInstance(scope); - } - - errors.scopeNotFound(scopeAnnotation); - return Scoping.UNSCOPED; - } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/Singleton.java b/server/src/main/java/org/elasticsearch/common/inject/Singleton.java deleted file mode 100644 index 24a4ba92dd20a..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/Singleton.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** - * Apply this to implementation classes when you want only one instance - * (per {@link Injector}) to be reused for all injections for that binding. - * - * @author crazybob@google.com (Bob Lee) - */ -@Target({ ElementType.TYPE, ElementType.METHOD }) -@Retention(RUNTIME) -@ScopeAnnotation -public @interface Singleton { -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/Stage.java b/server/src/main/java/org/elasticsearch/common/inject/Stage.java deleted file mode 100644 index 5533cae4c4123..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/Stage.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -/** - * The stage we're running in. 
- * - * @author crazybob@google.com (Bob Lee) - */ -public enum Stage { - - /** - * We're running in a tool (an IDE plugin for example). We need binding meta data but not a - * functioning Injector. Do not inject members of instances. Do not load eager singletons. Do as - * little as possible so our tools run nice and snappy. Injectors created in this stage cannot - * be used to satisfy injections. - */ - TOOL, - - /** - * We want fast startup times at the expense of runtime performance and some up front error - * checking. - */ - DEVELOPMENT, - - /** - * We want to catch errors as early as possible and take performance hits up front. - */ - PRODUCTION -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/State.java b/server/src/main/java/org/elasticsearch/common/inject/State.java index 553ed746ede80..e8b9404447f4f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/State.java +++ b/server/src/main/java/org/elasticsearch/common/inject/State.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.MatcherAndConverter; -import java.lang.annotation.Annotation; import java.util.Map; import static java.util.Collections.emptySet; @@ -54,16 +53,6 @@ public void putBinding(Key key, BindingImpl binding) { throw new UnsupportedOperationException(); } - @Override - public Scope getScope(Class scopingAnnotation) { - return null; - } - - @Override - public void putAnnotation(Class annotationType, Scope scope) { - throw new UnsupportedOperationException(); - } - @Override public void addConverter(MatcherAndConverter matcherAndConverter) { throw new UnsupportedOperationException(); @@ -113,13 +102,6 @@ public Object lock() { void putBinding(Key key, BindingImpl binding); - /** - * Returns the matching scope, or null. 
- */ - Scope getScope(Class scopingAnnotation); - - void putAnnotation(Class annotationType, Scope scope); - void addConverter(MatcherAndConverter matcherAndConverter); /** diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java b/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java index 60b6a74dec997..345c92ce8d354 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java @@ -61,7 +61,7 @@ protected BindingImpl setBinding(BindingImpl binding) { public void asEagerSingleton() { checkNotScoped(); - setBinding(getBinding().withScoping(Scoping.EAGER_SINGLETON)); + setBinding(getBinding().withEagerSingletonScoping()); } protected void checkNotTargetted() { diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java index 68fb7e68ba4bc..24f2539e2574e 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java @@ -16,10 +16,8 @@ package org.elasticsearch.common.inject.internal; -import org.elasticsearch.common.Classes; import org.elasticsearch.common.inject.BindingAnnotation; import org.elasticsearch.common.inject.Key; -import org.elasticsearch.common.inject.ScopeAnnotation; import org.elasticsearch.common.inject.TypeLiteral; import java.lang.annotation.Annotation; @@ -42,51 +40,6 @@ public static boolean isRetainedAtRuntime(Class annotation return retention != null && retention.value() == RetentionPolicy.RUNTIME; } - /** - * Returns the scope annotation on {@code type}, or null if none is specified. - */ - public static Class findScopeAnnotation(Errors errors, Class implementation) { - return findScopeAnnotation(errors, implementation.getAnnotations()); - } - - /** - * Returns the scoping annotation, or null if there isn't one. - */ - public static Class findScopeAnnotation(Errors errors, Annotation[] annotations) { - Class found = null; - - for (Annotation annotation : annotations) { - if (annotation.annotationType().getAnnotation(ScopeAnnotation.class) != null) { - if (found != null) { - errors.duplicateScopeAnnotations(found, annotation.annotationType()); - } else { - found = annotation.annotationType(); - } - } - } - - return found; - } - - public static boolean isScopeAnnotation(Class annotationType) { - return annotationType.getAnnotation(ScopeAnnotation.class) != null; - } - - /** - * Adds an error if there is a misplaced annotations on {@code type}. Scoping - * annotations are not allowed on abstract classes or interfaces. - */ - public static void checkForMisplacedScopeAnnotations(Class type, Object source, Errors errors) { - if (Classes.isConcrete(type)) { - return; - } - - Class scopeAnnotation = findScopeAnnotation(errors, type); - if (scopeAnnotation != null) { - errors.withSource(type).scopeAnnotationOnAbstractType(scopeAnnotation, type, source); - } - } - /** * Gets a key for the given type, member and annotations. 
*/ diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java index d6a00a7d3dc6c..b68600ef5381f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java @@ -95,7 +95,7 @@ public V acceptVisitor(ElementVisitor visitor) { return visitor.visit(this); } - protected BindingImpl withScoping(Scoping scoping) { + protected BindingImpl withEagerSingletonScoping() { throw new AssertionError(); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java index ea4b530f48b9b..cb78cf34e7d8a 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Key; import org.elasticsearch.common.inject.ProvisionException; -import org.elasticsearch.common.inject.Scope; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.InjectionPoint; @@ -196,14 +195,6 @@ public Errors bindingToProvider() { return addMessage("Binding to Provider is not allowed."); } - public Errors missingRuntimeRetention(Object source) { - return addMessage("Please annotate with @Retention(RUNTIME).%n" + " Bound at %s.", convert(source)); - } - - public Errors missingScopeAnnotation() { - return addMessage("Please annotate with @ScopeAnnotation."); - } - public Errors optionalConstructor(Constructor constructor) { return addMessage("%s is annotated @Inject(optional=true), " + "but constructors cannot be optional.", constructor); } @@ -212,19 +203,6 @@ public Errors cannotBindToGuiceType(String simpleName) { return addMessage("Binding to core guice framework type is not allowed: %s.", simpleName); } - public Errors scopeNotFound(Class scopeAnnotation) { - return addMessage("No scope is bound to %s.", scopeAnnotation); - } - - public Errors scopeAnnotationOnAbstractType(Class scopeAnnotation, Class type, Object source) { - return addMessage( - "%s is annotated with %s, but scope annotations are not supported " + "for abstract types.%n Bound at %s.", - type, - scopeAnnotation, - convert(source) - ); - } - public Errors misplacedBindingAnnotation(Member member, Annotation bindingAnnotation) { return addMessage( "%s is annotated with %s, but binding annotations should be applied " + "to its parameters instead.", @@ -245,10 +223,6 @@ public Errors tooManyConstructors(Class implementation) { return addMessage("%s has more than one constructor annotated with @Inject. " + CONSTRUCTOR_RULES, implementation); } - public Errors duplicateScopes(Scope existing, Class annotationType, Scope scope) { - return addMessage("Scope %s is already bound to %s. Cannot bind %s.", existing, annotationType, scope); - } - public Errors missingConstantValues() { return addMessage("Missing constant value. 
Please call to(...)."); } @@ -264,10 +238,6 @@ public Errors duplicateBindingAnnotations(Member member, Class a, Class b) { - return addMessage("More than one scope annotation was found: %s and %s.", a, b); - } - public Errors recursiveBinding() { return addMessage("Binding points to itself."); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java index f5b36cf33b800..eae400dbbe052 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java @@ -74,8 +74,8 @@ public Set getInjectionPoints() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new InstanceBindingImpl<>(getSource(), getKey(), scoping, injectionPoints, instance); + public BindingImpl withEagerSingletonScoping() { + return new InstanceBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, injectionPoints, instance); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java index 135726f80e25b..79e2a4f34c7ec 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java @@ -53,8 +53,8 @@ public Key getLinkedKey() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new LinkedBindingImpl<>(getSource(), getKey(), scoping, targetKey); + public BindingImpl withEagerSingletonScoping() { + return new LinkedBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, targetKey); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java index 0bfd2ef273a74..7f9e36be4e49d 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java @@ -54,8 +54,8 @@ public Key> getProviderKey() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new LinkedProviderBindingImpl<>(getSource(), getKey(), scoping, providerKey); + public BindingImpl withEagerSingletonScoping() { + return new LinkedProviderBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, providerKey); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java index 792c18920a6fa..ae8823e2f7246 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java @@ -72,8 +72,8 @@ public Set getInjectionPoints() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new ProviderInstanceBindingImpl<>(getSource(), getKey(), scoping, injectionPoints, providerInstance); + public BindingImpl withEagerSingletonScoping() { + return new ProviderInstanceBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, injectionPoints, providerInstance); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java index 797996afbf499..d0544aff20e39 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java @@ -18,10 +18,6 @@ import org.elasticsearch.common.inject.Scope; import org.elasticsearch.common.inject.Scopes; -import org.elasticsearch.common.inject.Singleton; -import org.elasticsearch.common.inject.Stage; - -import java.lang.annotation.Annotation; /** * References a scope, either directly (as a scope instance), or indirectly (as a scope annotation). @@ -49,34 +45,6 @@ public String toString() { }; - public static final Scoping SINGLETON_ANNOTATION = new Scoping() { - - @Override - public Class getScopeAnnotation() { - return Singleton.class; - } - - @Override - public String toString() { - return Singleton.class.getName(); - } - - }; - - public static final Scoping SINGLETON_INSTANCE = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.SINGLETON; - } - - @Override - public String toString() { - return Scopes.SINGLETON.toString(); - } - - }; - public static final Scoping EAGER_SINGLETON = new Scoping() { @Override @@ -91,46 +59,6 @@ public String toString() { }; - public static Scoping forAnnotation(final Class scopingAnnotation) { - if (scopingAnnotation == Singleton.class) { - return SINGLETON_ANNOTATION; - } - - return new Scoping() { - - @Override - public Class getScopeAnnotation() { - return scopingAnnotation; - } - - @Override - public String toString() { - return scopingAnnotation.getName(); - } - - }; - } - - public static Scoping forInstance(final Scope scope) { - if (scope == Scopes.SINGLETON) { - return SINGLETON_INSTANCE; - } - - return new Scoping() { - - @Override - public Scope getScopeInstance() { - return scope; - } - - @Override - public String toString() { - return scope.toString(); - } - - }; - } - /** * Returns true if this scope was explicitly applied. If no scope was explicitly applied then the * scoping annotation will be used. @@ -150,16 +78,8 @@ public boolean isNoScope() { /** * Returns true if this scope is a singleton that should be loaded eagerly in {@code stage}. */ - public boolean isEagerSingleton(Stage stage) { - if (this == EAGER_SINGLETON) { - return true; - } - - if (stage == Stage.PRODUCTION) { - return this == SINGLETON_ANNOTATION || this == SINGLETON_INSTANCE; - } - - return false; + public boolean isEagerSingleton() { + return this == EAGER_SINGLETON; } /** @@ -169,12 +89,5 @@ public Scope getScopeInstance() { return null; } - /** - * Returns the scope annotation, or {@code null} if that isn't known for this instance. 
- */ - public Class getScopeAnnotation() { - return null; - } - private Scoping() {} } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java index c5595d570563f..d0ef8affa0bfb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java @@ -37,8 +37,8 @@ public void acceptTargetVisitor(BindingTargetVisitor visitor) } @Override - public BindingImpl withScoping(Scoping scoping) { - return new UntargettedBindingImpl<>(getSource(), getKey(), scoping); + public BindingImpl withEagerSingletonScoping() { + return new UntargettedBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java index 596418f3e182d..6fca4135b596f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java @@ -32,7 +32,5 @@ @Retention(RUNTIME) @BindingAnnotation @interface Element { - String setName(); - int uniqueId(); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java index f8646ecf5fa22..5ba9bb9cdead0 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java @@ -211,7 +211,7 @@ public LinkedBindingBuilder addBinding(K key) { Multibinder.checkNotNull(key, "key"); Multibinder.checkConfiguration(isInitialized() == false, "MapBinder was already initialized"); - Key valueKey = Key.get(valueType, new RealElement(RealMultibinder.getSetName())); + Key valueKey = Key.get(valueType, new RealElement()); entrySetBinder.addBinding().toInstance(new MapEntry<>(key, binder.getProvider(valueKey))); return binder.bind(valueKey); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 26609b4e2bdb9..dc44f68f2a3d0 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -152,7 +152,7 @@ public void configure(Binder binder) { public LinkedBindingBuilder addBinding() { checkConfiguration(isInitialized() == false, "Multibinder was already initialized"); - return binder.bind(Key.get(elementType, new RealElement(""))); + return binder.bind(Key.get(elementType, new RealElement())); } /** @@ -165,9 +165,7 @@ public void initialize(Injector injector) { providers = new ArrayList<>(); for (Binding entry : injector.findBindingsByType(elementType)) { Key key = entry.getKey(); - if (key.getTypeLiteral().equals(elementType) - && key.getAnnotation() instanceof Element - && ((Element) key.getAnnotation()).setName().equals("")) { + if (key.getTypeLiteral().equals(elementType) && key.getAnnotation() instanceof Element) { providers.add(entry.getProvider()); } } @@ -192,10 +190,6 @@ public Set get() { return Collections.unmodifiableSet(result); } - static String 
getSetName() { - return ""; - } - Key> getSetKey() { return setKey; } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java index 050a9c2fff590..a1c403744a23d 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java @@ -26,16 +26,9 @@ class RealElement implements Element { private static final AtomicInteger nextUniqueId = new AtomicInteger(1); private final int uniqueId; - private final String setName; - RealElement(String setName) { + RealElement() { uniqueId = nextUniqueId.getAndIncrement(); - this.setName = setName; - } - - @Override - public String setName() { - return setName; } @Override @@ -50,16 +43,16 @@ public Class annotationType() { @Override public String toString() { - return "@" + Element.class.getName() + "(setName=" + setName + ",uniqueId=" + uniqueId + ")"; + return "@" + Element.class.getName() + "(uniqueId=" + uniqueId + ")"; } @Override public boolean equals(Object o) { - return o instanceof Element && ((Element) o).setName().equals(setName()) && ((Element) o).uniqueId() == uniqueId(); + return ((Element) o).uniqueId() == uniqueId(); } @Override public int hashCode() { - return 127 * ("setName".hashCode() ^ setName.hashCode()) + 127 * ("uniqueId".hashCode() ^ uniqueId); + return Integer.hashCode(uniqueId); } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java index dcfe9733e5e8b..094e869d8caa8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java @@ -33,11 +33,6 @@ public interface ElementVisitor { */ V visit(Binding binding); - /** - * Visit a registration of a scope annotation with the scope that implements it. - */ - V visit(ScopeBinding binding); - /** * Visit a lookup of the provider for a type. */ diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index 22f86d6991e84..0115abcfa3f03 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -24,8 +24,6 @@ import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.PrivateBinder; import org.elasticsearch.common.inject.Provider; -import org.elasticsearch.common.inject.Scope; -import org.elasticsearch.common.inject.Stage; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder; import org.elasticsearch.common.inject.internal.AbstractBindingBuilder; @@ -33,7 +31,6 @@ import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.SourceProvider; -import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -54,14 +51,14 @@ public final class Elements { * Records the elements executed by {@code modules}. */ public static List getElements(Module... modules) { - return getElements(Stage.DEVELOPMENT, Arrays.asList(modules)); + return getElements(Arrays.asList(modules)); } /** * Records the elements executed by {@code modules}. 
*/ - public static List getElements(Stage stage, Iterable modules) { - RecordingBinder binder = new RecordingBinder(stage); + public static List getElements(Iterable modules) { + RecordingBinder binder = new RecordingBinder(); for (Module module : modules) { binder.install(module); } @@ -69,7 +66,6 @@ public static List getElements(Stage stage, Iterable } private static class RecordingBinder implements Binder, PrivateBinder { - private final Stage stage; private final Set modules; private final List elements; private final Object source; @@ -80,8 +76,7 @@ private static class RecordingBinder implements Binder, PrivateBinder { */ private final RecordingBinder parent; - private RecordingBinder(Stage stage) { - this.stage = stage; + private RecordingBinder() { this.modules = new HashSet<>(); this.elements = new ArrayList<>(); this.source = null; @@ -103,7 +98,6 @@ private RecordingBinder(RecordingBinder prototype, Object source, SourceProvider throw new IllegalArgumentException(); } - this.stage = prototype.stage; this.modules = prototype.modules; this.elements = prototype.elements; this.source = source; @@ -111,11 +105,6 @@ private RecordingBinder(RecordingBinder prototype, Object source, SourceProvider this.parent = prototype.parent; } - @Override - public void bindScope(Class annotationType, Scope scope) { - elements.add(new ScopeBinding(getSource(), annotationType, scope)); - } - @Override public void install(Module module) { if (modules.add(module)) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java deleted file mode 100644 index 958562a528a5b..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (C) 2008 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject.spi; - -import org.elasticsearch.common.inject.Scope; - -import java.lang.annotation.Annotation; -import java.util.Objects; - -/** - * Registration of a scope annotation with the scope that implements it. Instances are created - * explicitly in a module using {@link org.elasticsearch.common.inject.Binder#bindScope(Class, Scope) bindScope()} - * statements: - *

    - *     Scope recordScope = new RecordScope();
    - *     bindScope(RecordScoped.class, new RecordScope());
    - * - * @author jessewilson@google.com (Jesse Wilson) - * @since 2.0 - */ -public final class ScopeBinding implements Element { - private final Object source; - private final Class annotationType; - private final Scope scope; - - ScopeBinding(Object source, Class annotationType, Scope scope) { - this.source = Objects.requireNonNull(source, "source"); - this.annotationType = Objects.requireNonNull(annotationType, "annotationType"); - this.scope = Objects.requireNonNull(scope, "scope"); - } - - @Override - public Object getSource() { - return source; - } - - public Class getAnnotationType() { - return annotationType; - } - - public Scope getScope() { - return scope; - } - - @Override - public T acceptVisitor(ElementVisitor visitor) { - return visitor.visit(this); - } - -} From 8982513ffe26d149b9467b89db8808c1b4973736 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 1 Apr 2024 10:28:59 -0700 Subject: [PATCH 032/264] Adjust array resizing in block builder (#106934) I looked into an async profiler and found that AbstractBlockBuilder#updatePosition was consuming a significant amount of CPU. This is because we're growing the firstValueIndexes array one by one. While this minimizes wasted memory, it requires more CPU. I think we should use ArrayUtil.oversize() to resize this array. In contrast, it appears that we're growing the values array too quickly by 50% each time. I think we should use ArrayUtil.oversize() with a growth rate of 1/8 here as well. --- docs/changelog/106934.yaml | 5 +++ .../compute/data/BooleanArrayVector.java | 9 +++-- .../compute/data/BooleanBlockBuilder.java | 33 +++++++------------ .../compute/data/DoubleArrayVector.java | 9 +++-- .../compute/data/DoubleBlockBuilder.java | 33 +++++++------------ .../compute/data/IntArrayVector.java | 9 +++-- .../compute/data/IntBlockBuilder.java | 33 +++++++------------ .../compute/data/LongArrayVector.java | 9 +++-- .../compute/data/LongBlockBuilder.java | 33 +++++++------------ .../compute/data/AbstractArrayBlock.java | 4 +-- .../compute/data/AbstractBlockBuilder.java | 17 +++++----- .../compute/data/X-ArrayVector.java.st | 9 +++-- .../compute/data/X-BlockBuilder.java.st | 33 +++++++------------ 13 files changed, 110 insertions(+), 126 deletions(-) create mode 100644 docs/changelog/106934.yaml diff --git a/docs/changelog/106934.yaml b/docs/changelog/106934.yaml new file mode 100644 index 0000000000000..fbfce3118e8a6 --- /dev/null +++ b/docs/changelog/106934.yaml @@ -0,0 +1,5 @@ +pr: 106934 +summary: Adjust array resizing in block builder +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 63f02b14d9481..3cebcd75cbe7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of boolean values. 
@@ -112,7 +113,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? ", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 988106779a9d5..09c436e805d57 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -212,28 +212,19 @@ public BooleanBlock build() { BooleanBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantBooleanBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newBooleanArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newBooleanArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index a7868beaf5db8..451b6cc7b655b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of double values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 9a157cdcef50e..427127784869a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -210,28 +210,19 @@ public DoubleBlock build() { DoubleBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newDoubleArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newDoubleArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 644af9ae512a8..5273ab0546151 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of int values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index d49f5af05a8a7..aaf46798fd789 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -210,28 +210,19 @@ public IntBlock build() { IntBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantIntBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newIntArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newIntArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index b3cee58356d70..6eec82528c8da 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of long values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index b74831599276b..5d8daf306809d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -210,28 +210,19 @@ public LongBlock build() { LongBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantLongBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newLongArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newLongArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index 81098cba393bb..f163c630e259c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -61,9 +61,9 @@ protected final BitSet shiftNullsToExpandedPositions() { private boolean assertInvariants() { if (firstValueIndexes != null) { - assert firstValueIndexes.length == getPositionCount() + 1; + assert firstValueIndexes.length >= getPositionCount() + 1 : firstValueIndexes.length + " < " + positionCount; for (int i = 0; i < getPositionCount(); i++) { - assert (firstValueIndexes[i + 1] - firstValueIndexes[i]) >= 0; + assert firstValueIndexes[i + 1] >= firstValueIndexes[i] : firstValueIndexes[i + 1] + " < " + firstValueIndexes[i]; } } if (nullsMask != null) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index 24303ff0ea0a4..abf3a243b7682 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import java.util.Arrays; +import org.apache.lucene.util.ArrayUtil; + import java.util.BitSet; import 
java.util.stream.IntStream; @@ -139,7 +140,7 @@ protected final void ensureCapacity() { if (valueCount < valuesLength) { return; } - int newSize = calculateNewArraySize(valuesLength); + int newSize = ArrayUtil.oversize(valueCount, elementSize()); adjustBreaker(newSize * elementSize()); growValuesArray(newSize); adjustBreaker(-valuesLength * elementSize()); @@ -159,11 +160,6 @@ public final void close() { */ protected void extraClose() {} - static int calculateNewArraySize(int currentSize) { - // trivially, grows array by 50% - return currentSize + (currentSize >> 1); - } - protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes); estimatedBytes += deltaBytes; @@ -173,8 +169,11 @@ protected void adjustBreaker(long deltaBytes) { private void setFirstValue(int position, int value) { if (position >= firstValueIndexes.length) { final int currentSize = firstValueIndexes.length; - adjustBreaker((long) (position + 1 - currentSize) * Integer.BYTES); - firstValueIndexes = Arrays.copyOf(firstValueIndexes, position + 1); + // We grow the `firstValueIndexes` at the same rate as the `values` array, but independently. + final int newLength = ArrayUtil.oversize(position + 1, Integer.BYTES); + adjustBreaker((long) newLength * Integer.BYTES); + firstValueIndexes = ArrayUtil.growExact(firstValueIndexes, newLength); + adjustBreaker(-(long) currentSize * Integer.BYTES); } firstValueIndexes[position] = value; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index b5ecb2cad4a56..9615ce83215e8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -23,7 +23,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; $endif$ /** @@ -173,7 +174,11 @@ $endif$ $if(BytesRef)$ return getClass().getSimpleName() + "[positions=" + getPositionCount() + ']'; $else$ - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index f4ee6c145f3ed..5b432f1c62968 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -327,28 +327,19 @@ $endif$ $else$ if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstant$Type$BlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.new$Type$ArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.new$Type$ArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } $endif$ built(); From 3810a9164eecac3478675251ef601156a3bc6ed1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 1 Apr 2024 12:49:29 -0700 Subject: [PATCH 033/264] Block readiness on file settings being applied (#106437) The readiness service is supposed to wait on the cluster being formed as well as file settings being applied. Yet file settings application is only checked on the master node. Since master election itself does not guarantee file settings have been applied, non-master nodes may mark themselves as ready even though they should not yet accept requests. This commit reworks how the readiness service looks ofr file settings being applied. Previously it only worked on the master node where it got a callback directly from the file settings service. With this change we now only look at cluster state. Note that this means the readiness service and file based settings are tightly coupled. In practice this was always the case as they are both meant to be used in cloud environments, but the coupling is a bit tighter now since the readiness service will never report ready unless file based settings exist. 
--- .../readiness/ReadinessClusterIT.java | 66 ++++++++++++++----- .../file/AbstractFileWatchingService.java | 3 + .../java/org/elasticsearch/node/Node.java | 4 -- .../readiness/ReadinessService.java | 33 ++++------ .../service/FileSettingsService.java | 9 +++ .../service/ReservedClusterStateService.java | 21 ++++++ .../service/ReservedStateUpdateTask.java | 8 ++- .../AbstractFileWatchingServiceTests.java | 7 ++ .../readiness/ReadinessServiceTests.java | 42 +++++++----- .../shutdown/NodeShutdownReadinessIT.java | 28 ++++++++ 10 files changed, 165 insertions(+), 56 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java index 3c06a4c084e04..3d200e0fa195a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; -import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -22,12 +21,13 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.InternalTestCluster; +import org.junit.Before; +import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -83,6 +83,13 @@ public class ReadinessClusterIT extends ESIntegTestCase { } }"""; + Path configDir; + + @Before + public void setupMasterConfigDir() throws IOException { + configDir = createTempDir(); + } + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder settings = Settings.builder() @@ -91,6 +98,11 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return configDir; + } + @Override protected Collection> getMockPlugins() { final List> plugins = new ArrayList<>(super.getMockPlugins()); @@ -108,6 +120,7 @@ private void expectMasterNotFound() { public void testReadinessDuringRestarts() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); logger.info("--> start data node / non master node"); String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); @@ -143,6 +156,7 @@ public void testReadinessDuringRestarts() throws Exception { public void testReadinessDuringRestartsNormalOrder() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); logger.info("--> start master node"); String masterNode = internalCluster().startMasterOnlyNode(); internalCluster().validateClusterFormed(); @@ -222,16 +236,14 @@ public void 
clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } - private void writeJSONFile(String node, String json) throws Exception { + private void writeFileSettings(String json) throws Exception { long version = versionCounter.incrementAndGet(); - - FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); - - Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); + Path fileSettings = configDir.resolve("operator").resolve("settings.json"); + Files.createDirectories(fileSettings.getParent()); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); - Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + Files.move(tempFilePath, fileSettings, StandardCopyOption.ATOMIC_MOVE); logger.info("--> New file settings: [{}]", Strings.format(json, version)); } @@ -244,7 +256,7 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertFalse(dataFileSettingsService.watching()); logger.info("--> write bad file settings before we boot master node"); - writeJSONFile(dataNode, testErrorJSON); + writeFileSettings(testErrorJSON); logger.info("--> start master node"); final String masterNode = internalCluster().startMasterOnlyNode( @@ -269,6 +281,31 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertNull(s.boundAddress()); } + public void testReadyWhenMissingFileSettings() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListener(masterNode); + + // we need this after we setup the listener above, in case the node started and processed + // settings before we set our listener to cluster state changes. 
+ causeClusterStateUpdate(); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + ReadinessService s = internalCluster().getInstance(ReadinessService.class, masterNode); + assertNotNull(s.boundAddress()); + } + private Tuple setupClusterStateListener(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); @@ -278,12 +315,9 @@ private Tuple setupClusterStateListener(String node) public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); if (reservedState != null) { - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedClusterSettingsAction.NAME); - if (handlerMetadata != null && handlerMetadata.keys().contains("indices.recovery.max_bytes_per_sec")) { - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - } + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); } } }); @@ -308,7 +342,7 @@ public void testReadyAfterCorrectFileSettings() throws Exception { var savedClusterState = setupClusterStateListener(dataNode); logger.info("--> write correct file settings before we boot master node"); - writeJSONFile(dataNode, testJSON); + writeFileSettings(testJSON); logger.info("--> start master node"); final String masterNode = internalCluster().startMasterOnlyNode(); diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index 4279733ec403a..49cb06a55a4e9 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -74,6 +74,8 @@ public AbstractFileWatchingService(Path watchedFile) { */ protected abstract void processFileChanges() throws InterruptedException, ExecutionException, IOException; + protected abstract void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException; + public final void addFileChangedListener(FileChangedListener listener) { eventListeners.add(listener); } @@ -173,6 +175,7 @@ protected final void watcherThread() { logger.debug("found initial operator settings file [{}], applying...", path); processSettingsAndNotifyListeners(); } else { + processInitialFileMissing(); // Notify everyone we don't have any initial file settings for (var listener : eventListeners) { listener.watchedFileChanged(); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 8ff2ac5e5fca0..801a2038fc06b 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -358,10 +358,6 @@ public Node start() throws NodeValidationException { final FileSettingsService fileSettingsService = injector.getInstance(FileSettingsService.class); fileSettingsService.start(); - // if we are using the readiness service, listen for the file settings being applied - if 
(ReadinessService.enabled(environment)) { - fileSettingsService.addFileChangedListener(injector.getInstance(ReadinessService.class)); - } clusterService.addStateApplier(transportService.getTaskManager()); // start after transport service so the local disco is known diff --git a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java index 7f7a55762bf08..b8f874a8bfcb5 100644 --- a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java +++ b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -21,7 +22,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.reservedstate.service.FileChangedListener; +import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.transport.BindTransportException; @@ -38,7 +39,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; -public class ReadinessService extends AbstractLifecycleComponent implements ClusterStateListener, FileChangedListener { +public class ReadinessService extends AbstractLifecycleComponent implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(ReadinessService.class); private final Environment environment; @@ -50,9 +51,6 @@ public class ReadinessService extends AbstractLifecycleComponent implements Clus volatile CountDownLatch listenerThreadLatch = new CountDownLatch(0); final AtomicReference boundSocket = new AtomicReference<>(); private final Collection boundAddressListeners = new CopyOnWriteArrayList<>(); - private volatile boolean fileSettingsApplied = false; - private volatile boolean masterElected = false; - private volatile boolean shuttingDown = false; public static final Setting PORT = Setting.intSetting("readiness.port", -1, Setting.Property.NodeScope); @@ -237,9 +235,7 @@ protected void doClose() {} public void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); Set shutdownNodeIds = PluginShutdownService.shutdownNodes(clusterState); - - this.masterElected = clusterState.nodes().getMasterNodeId() != null; - this.shuttingDown = shutdownNodeIds.contains(clusterState.nodes().getLocalNodeId()); + boolean shuttingDown = shutdownNodeIds.contains(clusterState.nodes().getLocalNodeId()); if (shuttingDown) { // only disable the probe and log if the probe is running @@ -248,14 +244,19 @@ public void clusterChanged(ClusterChangedEvent event) { logger.info("marking node as not ready because it's shutting down"); } } else { - if (clusterState.nodes().getLocalNodeId().equals(clusterState.nodes().getMasterNodeId())) { - setReady(fileSettingsApplied); - } else { - setReady(masterElected); - } + boolean masterElected = clusterState.nodes().getMasterNodeId() != null; + boolean fileSettingsApplied = areFileSettingsApplied(clusterState); + logger.info("readiness: 
masterElected={}, fileSettingsApplied={}", masterElected, fileSettingsApplied); + setReady(masterElected && fileSettingsApplied); } } + // protected to allow mock service to override + protected boolean areFileSettingsApplied(ClusterState clusterState) { + ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + return fileSettingsMetadata != null && fileSettingsMetadata.errorMetadata() == null; + } + private void setReady(boolean ready) { if (ready) { startListener(); @@ -277,12 +278,6 @@ public synchronized void addBoundAddressListener(BoundAddressListener listener) boundAddressListeners.add(listener); } - @Override - public void watchedFileChanged() { - fileSettingsApplied = true; - setReady(masterElected && (shuttingDown == false)); - } - /** * A listener to be notified when the readiness service establishes the port it's listening on. * The {@link #addressBound(BoundTransportAddress)} method is called after the readiness service socket diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 56c975e148ab5..8719c8cbf8730 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -125,6 +126,14 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept completion.get(); } + @Override + protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + PlainActionFuture completion = new PlainActionFuture<>(); + logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); + stateService.initEmpty(NAMESPACE, completion); + completion.get(); + } + private static void completeProcessing(Exception e, PlainActionFuture completion) { if (e != null) { completion.onFailure(e); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 76c2007dc8d8e..d2aea19417787 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.RefCountingListener; @@ -146,6 +147,26 @@ public void process(String namespace, XContentParser parser, Consumer process(namespace, stateChunk, errorListener); } + public void initEmpty(String namespace, ActionListener listener) { + var missingVersion = new ReservedStateVersion(-1L, Version.CURRENT); + var emptyState = new ReservedStateChunk(Map.of(), missingVersion); + updateTaskQueue.submitTask( + "empty initial cluster state [" + namespace + "]", + new 
ReservedStateUpdateTask( + namespace, + emptyState, + List.of(), + Map.of(), + List.of(), + // error state should not be possible since there is no metadata being parsed or processed + errorState -> { throw new AssertionError(); }, + listener + ), + null + ); + + } + /** * Saves and reserves a chunk of the cluster state under a given 'namespace' from {@link XContentParser} * diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 08f576f4a37e0..2ee9aa0d86a0e 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -169,14 +169,18 @@ static boolean checkMetadataVersion( return false; } + // Version -1 is special, it means "empty" + if (reservedStateVersion.version() == -1L) { + return true; + } + // Version 0 is special, snapshot restores will reset to 0. if (reservedStateVersion.version() <= 0L) { logger.warn( () -> format( "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal to 0", namespace, - reservedStateVersion.version(), - existingMetadata.version() + reservedStateVersion.version() ) ); return false; diff --git a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java index e12312844e571..d3924bd10d240 100644 --- a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java @@ -73,6 +73,13 @@ protected void processFileChanges() throws InterruptedException, ExecutionExcept countDownLatch.countDown(); } } + + @Override + protected void processInitialFileMissing() { + if (countDownLatch != null) { + countDownLatch.countDown(); + } + } } private AbstractFileWatchingService fileWatchingService; diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index e794752aff15e..68c2230fe138f 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -29,6 +30,7 @@ import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; +import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.readiness.ReadinessClientProbe; @@ -51,6 +53,12 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private Environment env; private FakeHttpTransport httpTransport; + private static Metadata emptyReservedStateMetadata; + static { + var fileSettingsState = new 
ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(-1L); + emptyReservedStateMetadata = new Metadata.Builder().put(fileSettingsState.build()).build(); + } + static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { final DiscoveryNode node; @@ -192,44 +200,49 @@ public void testStatusChange() throws Exception { // initially the service isn't ready assertFalse(readinessService.ready()); - ClusterState previousState = ClusterState.builder(new ClusterName("cluster")) + ClusterState emptyState = ClusterState.builder(new ClusterName("cluster")) .nodes( DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("node2", new TransportAddress(TransportAddress.META_ADDRESS, 9201))) ) .build(); - ClusterState newState = ClusterState.builder(previousState) + ClusterState noFileSettingsState = ClusterState.builder(emptyState) .nodes( - DiscoveryNodes.builder(previousState.nodes()) + DiscoveryNodes.builder(emptyState.nodes()) .add(httpTransport.node) .masterNodeId(httpTransport.node.getId()) .localNodeId(httpTransport.node.getId()) ) .build(); - ClusterChangedEvent event = new ClusterChangedEvent("test", newState, previousState); + ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState); readinessService.clusterChanged(event); - readinessService.watchedFileChanged(); - // sending a cluster state with active master should bring up the service - assertTrue(readinessService.ready()); + // sending a cluster state with active master should not yet bring up the service, file settings still are not applied + assertFalse(readinessService.ready()); + + ClusterState completeState = ClusterState.builder(noFileSettingsState).metadata(emptyReservedStateMetadata).build(); + event = new ClusterChangedEvent("test", completeState, noFileSettingsState); + readinessService.clusterChanged(event); - previousState = newState; + // sending a cluster state with active master and file settings applied should bring up the service + assertTrue(readinessService.ready()); tcpReadinessProbeTrue(readinessService); - ClusterState noMasterState = ClusterState.builder(previousState).nodes(previousState.nodes().withMasterNodeId(null)).build(); - event = new ClusterChangedEvent("test", noMasterState, previousState); + ClusterState noMasterState = ClusterState.builder(completeState).nodes(completeState.nodes().withMasterNodeId(null)).build(); + event = new ClusterChangedEvent("test", noMasterState, completeState); readinessService.clusterChanged(event); assertFalse(readinessService.ready()); tcpReadinessProbeFalse(readinessService); - event = new ClusterChangedEvent("test", previousState, noMasterState); + event = new ClusterChangedEvent("test", completeState, noMasterState); readinessService.clusterChanged(event); assertTrue(readinessService.ready()); tcpReadinessProbeTrue(readinessService); - newState = ClusterState.builder(previousState) + // shutting down flips back to not ready + ClusterState nodeShuttingDownState = ClusterState.builder(completeState) .metadata( - Metadata.builder(previousState.metadata()) + Metadata.builder(completeState.metadata()) .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( @@ -247,8 +260,7 @@ public void testStatusChange() throws Exception { .build() ) .build(); - - event = new ClusterChangedEvent("test", newState, previousState); + event = new ClusterChangedEvent("test", nodeShuttingDownState, completeState); var mockAppender = new MockLogAppender(); try (var ignored = 
mockAppender.capturing(ReadinessService.class)) { mockAppender.addExpectation( diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java index 87eaf4d37ae00..af0713665731c 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java @@ -17,7 +17,11 @@ import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -32,6 +36,30 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class NodeShutdownReadinessIT extends ESIntegTestCase { + Path configDir; + + @Before + public void setupMasterConfigDir() throws IOException { + configDir = createTempDir(); + Path settingsFile = configDir.resolve("operator").resolve("settings.json"); + Files.createDirectories(settingsFile.getParent()); + Files.writeString(settingsFile, """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + "state": { + "cluster_settings": {} + } + }"""); + } + + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return configDir; + } + @Override protected Collection> getMockPlugins() { final List> plugins = new ArrayList<>(super.getMockPlugins()); From a3794e7584daa4a62b6691964fded7d2d080f3a0 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Mon, 1 Apr 2024 16:12:48 -0400 Subject: [PATCH 034/264] [DOCS] Remove orphaned cluster issues troubleshooting doc (#106959) --- .../how-to/fix-common-cluster-issues.asciidoc | 747 ------------------ 1 file changed, 747 deletions(-) delete mode 100644 docs/reference/how-to/fix-common-cluster-issues.asciidoc diff --git a/docs/reference/how-to/fix-common-cluster-issues.asciidoc b/docs/reference/how-to/fix-common-cluster-issues.asciidoc deleted file mode 100644 index 531ae44cc3be2..0000000000000 --- a/docs/reference/how-to/fix-common-cluster-issues.asciidoc +++ /dev/null @@ -1,747 +0,0 @@ -[[fix-common-cluster-issues]] -== Fix common cluster issues - -This guide describes how to fix common errors and problems with {es} clusters. - -[discrete] -=== Error: disk usage exceeded flood-stage watermark, index has read-only-allow-delete block - -This error indicates a data node is critically low on disk space and has reached -the <>. To prevent -a full disk, when a node reaches this watermark, {es} blocks writes to any index -with a shard on the node. If the block affects related system indices, {kib} and -other {stack} features may become unavailable. - -{es} will automatically remove the write block when the affected node's disk -usage goes below the <>. To -achieve this, {es} automatically moves some of the affected node's shards to -other nodes in the same data tier. - -To verify that shards are moving off the affected node, use the <>. - -[source,console] ----- -GET _cat/shards?v=true ----- - -If shards remain on the node, use the <> to get an explanation for their allocation status.
- -[source,console] ----- -GET _cluster/allocation/explain -{ - "index": "my-index", - "shard": 0, - "primary": false, - "current_node": "my-node" -} ----- -// TEST[s/^/PUT my-index\n/] -// TEST[s/"primary": false,/"primary": false/] -// TEST[s/"current_node": "my-node"//] - -To immediately restore write operations, you can temporarily increase the disk -watermarks and remove the write block. - -[source,console] ----- -PUT _cluster/settings -{ - "persistent": { - "cluster.routing.allocation.disk.watermark.low": "90%", - "cluster.routing.allocation.disk.watermark.low.max_headroom": "100GB", - "cluster.routing.allocation.disk.watermark.high": "95%", - "cluster.routing.allocation.disk.watermark.high.max_headroom": "20GB", - "cluster.routing.allocation.disk.watermark.flood_stage": "97%", - "cluster.routing.allocation.disk.watermark.flood_stage.max_headroom": "5GB", - "cluster.routing.allocation.disk.watermark.flood_stage.frozen": "97%", - "cluster.routing.allocation.disk.watermark.flood_stage.frozen.max_headroom": "5GB" - } -} - -PUT */_settings?expand_wildcards=all -{ - "index.blocks.read_only_allow_delete": null -} ----- -// TEST[s/^/PUT my-index\n/] - -As a long-term solution, we recommend you add nodes to the affected data tiers -or upgrade existing nodes to increase disk space. To free up additional disk -space, you can delete unneeded indices using the <>. - -[source,console] ----- -DELETE my-index ----- -// TEST[s/^/PUT my-index\n/] - -When a long-term solution is in place, reset or reconfigure the disk watermarks. - -[source,console] ----- -PUT _cluster/settings -{ - "persistent": { - "cluster.routing.allocation.disk.watermark.low": null, - "cluster.routing.allocation.disk.watermark.low.max_headroom": null, - "cluster.routing.allocation.disk.watermark.high": null, - "cluster.routing.allocation.disk.watermark.high.max_headroom": null, - "cluster.routing.allocation.disk.watermark.flood_stage": null, - "cluster.routing.allocation.disk.watermark.flood_stage.max_headroom": null, - "cluster.routing.allocation.disk.watermark.flood_stage.frozen": null, - "cluster.routing.allocation.disk.watermark.flood_stage.frozen.max_headroom": null - } -} ----- - -[discrete] -[[circuit-breaker-errors]] -=== Circuit breaker errors - -{es} uses <> to prevent nodes from running out -of JVM heap memory. If Elasticsearch estimates an operation would exceed a -circuit breaker, it stops the operation and returns an error. - -By default, the <> triggers at -95% JVM memory usage. To prevent errors, we recommend taking steps to reduce -memory pressure if usage consistently exceeds 85%. - -[discrete] -[[diagnose-circuit-breaker-errors]] -==== Diagnose circuit breaker errors - -**Error messages** - -If a request triggers a circuit breaker, {es} returns an error with a `429` HTTP -status code. - -[source,js] ----- -{ - 'error': { - 'type': 'circuit_breaking_exception', - 'reason': '[parent] Data too large, data for [] would be [123848638/118.1mb], which is larger than the limit of [123273216/117.5mb], real usage: [120182112/114.6mb], new bytes reserved: [3666526/3.4mb]', - 'bytes_wanted': 123848638, - 'bytes_limit': 123273216, - 'durability': 'TRANSIENT' - }, - 'status': 429 -} ----- -// NOTCONSOLE - -{es} also writes circuit breaker errors to <>. This -is helpful when automated processes, such as allocation, trigger a circuit -breaker. 
- -[source,txt] ----- -Caused by: org.elasticsearch.common.breaker.CircuitBreakingException: [parent] Data too large, data for [] would be [num/numGB], which is larger than the limit of [num/numGB], usages [request=0/0b, fielddata=num/numKB, in_flight_requests=num/numGB, accounting=num/numGB] ----- - -**Check JVM memory usage** - -If you've enabled Stack Monitoring, you can view JVM memory usage in {kib}. In -the main menu, click **Stack Monitoring**. On the Stack Monitoring **Overview** -page, click **Nodes**. The **JVM Heap** column lists the current memory usage -for each node. - -You can also use the <> to get the current -`heap.percent` for each node. - -[source,console] ----- -GET _cat/nodes?v=true&h=name,node*,heap* ----- - -See <> for more details. - -To get the JVM memory usage for each circuit breaker, use the -<>. - -[source,console] ----- -GET _nodes/stats/breaker ----- - -[discrete] -[[prevent-circuit-breaker-errors]] -==== Prevent circuit breaker errors - -**Reduce JVM memory pressure** - -High JVM memory pressure often causes circuit breaker errors. See -<>. - -**Avoid using fielddata on `text` fields** - -For high-cardinality `text` fields, fielddata can use a large amount of JVM -memory. To avoid this, {es} disables fielddata on `text` fields by default. If -you've enabled fielddata and triggered the <>, consider disabling it and using a `keyword` field instead. -See <>. - -**Clear the fieldata cache** - -If you've triggered the fielddata circuit breaker and can't disable fielddata, -use the <> to clear the fielddata cache. -This may disrupt any in-flight searches that use fielddata. - -[source,console] ----- -POST _cache/clear?fielddata=true ----- -// TEST[s/^/PUT my-index\n/] - -[discrete] -[[high-cpu-usage]] -=== High CPU usage - -{es} uses <> to manage CPU resources for -concurrent operations. High CPU usage typically means one or more thread pools -are running low. - -If a thread pool is depleted, {es} will <> -related to the thread pool. For example, if the `search` thread pool is -depleted, {es} will reject search requests until more threads are available. - -[discrete] -[[diagnose-high-cpu-usage]] -==== Diagnose high CPU usage - -**Check CPU usage** - -include::{es-repo-dir}/tab-widgets/cpu-usage-widget.asciidoc[] - -**Check hot threads** - -If a node has high CPU usage, use the <> to check for resource-intensive threads running on the node. - -[source,console] ----- -GET _nodes/my-node,my-other-node/hot_threads ----- -// TEST[s/\/my-node,my-other-node//] - -This API returns a breakdown of any hot threads in plain text. - -[discrete] -[[reduce-cpu-usage]] -==== Reduce CPU usage - -The following tips outline the most common causes of high CPU usage and their -solutions. - -**Scale your cluster** - -Heavy indexing and search loads can deplete smaller thread pools. To better -handle heavy workloads, add more nodes to your cluster or upgrade your existing -nodes to increase capacity. - -**Spread out bulk requests** - -While more efficient than individual requests, large <> -or <> requests still require CPU resources. If -possible, submit smaller requests and allow more time between them. - -**Cancel long-running searches** - -Long-running searches can block threads in the `search` thread pool. To check -for these searches, use the <>. - -[source,console] ----- -GET _tasks?actions=*search&detailed ----- - -The response's `description` contains the search request and its queries. -`running_time_in_nanos` shows how long the search has been running. 
- -[source,console-result] ----- -{ - "nodes" : { - "oTUltX4IQMOUUVeiohTt8A" : { - "name" : "my-node", - "transport_address" : "127.0.0.1:9300", - "host" : "127.0.0.1", - "ip" : "127.0.0.1:9300", - "tasks" : { - "oTUltX4IQMOUUVeiohTt8A:464" : { - "node" : "oTUltX4IQMOUUVeiohTt8A", - "id" : 464, - "type" : "transport", - "action" : "indices:data/read/search", - "description" : "indices[my-index], search_type[QUERY_THEN_FETCH], source[{\"query\":...}]", - "start_time_in_millis" : 4081771730000, - "running_time_in_nanos" : 13991383, - "cancellable" : true - } - } - } - } -} ----- -// TESTRESPONSE[skip: no way to get tasks] - -To cancel a search and free up resources, use the API's `_cancel` endpoint. - -[source,console] ----- -POST _tasks/oTUltX4IQMOUUVeiohTt8A:464/_cancel ----- - -For additional tips on how to track and avoid resource-intensive searches, see -<>. - -[discrete] -[[high-jvm-memory-pressure]] -=== High JVM memory pressure - -High JVM memory usage can degrade cluster performance and trigger -<>. To prevent this, we recommend -taking steps to reduce memory pressure if a node's JVM memory usage consistently -exceeds 85%. - -[discrete] -[[diagnose-high-jvm-memory-pressure]] -==== Diagnose high JVM memory pressure - -**Check JVM memory pressure** - -include::{es-repo-dir}/tab-widgets/jvm-memory-pressure-widget.asciidoc[] - -**Check garbage collection logs** - -As memory usage increases, garbage collection becomes more frequent and takes -longer. You can track the frequency and length of garbage collection events in -<>. For example, the following event states {es} -spent more than 50% (21 seconds) of the last 40 seconds performing garbage -collection. - -[source,log] ----- -[timestamp_short_interval_from_last][INFO ][o.e.m.j.JvmGcMonitorService] [node_id] [gc][number] overhead, spent [21s] collecting in the last [40s] ----- - -[discrete] -[[reduce-jvm-memory-pressure]] -==== Reduce JVM memory pressure - -**Reduce your shard count** - -Every shard uses memory. In most cases, a small set of large shards uses fewer -resources than many small shards. For tips on reducing your shard count, see -<>. - -[[avoid-expensive-searches]] -**Avoid expensive searches** - -Expensive searches can use large amounts of memory. To better track expensive -searches on your cluster, enable <>. - -Expensive searches may have a large <>, -use aggregations with a large number of buckets, or include -<>. To prevent expensive -searches, consider the following setting changes: - -* Lower the `size` limit using the -<> index setting. - -* Decrease the maximum number of allowed aggregation buckets using the -<> cluster setting. - -* Disable expensive queries using the -<> cluster -setting. - -[source,console] ----- -PUT _settings -{ - "index.max_result_window": 5000 -} - -PUT _cluster/settings -{ - "persistent": { - "search.max_buckets": 20000, - "search.allow_expensive_queries": false - } -} ----- -// TEST[s/^/PUT my-index\n/] - -**Prevent mapping explosions** - -Defining too many fields or nesting fields too deeply can lead to -<> that use large amounts of memory. -To prevent mapping explosions, use the <> to limit the number of field mappings. - -**Spread out bulk requests** - -While more efficient than individual requests, large <> -or <> requests can still create high JVM -memory pressure. If possible, submit smaller requests and allow more time -between them. - -**Upgrade node memory** - -Heavy indexing and search loads can cause high JVM memory pressure. 
To better
-handle heavy workloads, upgrade your nodes to increase their memory capacity.
-
-[discrete]
-[[red-yellow-cluster-status]]
-=== Red or yellow cluster status
-
-A red or yellow cluster status indicates one or more shards are missing or
-unallocated. These unassigned shards increase your risk of data loss and can
-degrade cluster performance.
-
-[discrete]
-[[diagnose-cluster-status]]
-==== Diagnose your cluster status
-
-**Check your cluster status**
-
-Use the <>.
-
-[source,console]
-----
-GET _cluster/health?filter_path=status,*_shards
-----
-
-A healthy cluster has a green `status` and zero `unassigned_shards`. A yellow
-status means only replicas are unassigned. A red status means one or
-more primary shards are unassigned.
-
-**View unassigned shards**
-
-To view unassigned shards, use the <>.
-
-[source,console]
-----
-GET _cat/shards?v=true&h=index,shard,prirep,state,node,unassigned.reason&s=state
-----
-
-Unassigned shards have a `state` of `UNASSIGNED`. The `prirep` value is `p` for
-primary shards and `r` for replicas.
-
-To understand why an unassigned shard is not being assigned and what action
-you must take to allow {es} to assign it, use the
-<>.
-
-[source,console]
-----
-GET _cluster/allocation/explain?filter_path=index,node_allocation_decisions.node_name,node_allocation_decisions.deciders.*
-{
-  "index": "my-index",
-  "shard": 0,
-  "primary": false
-}
-----
-// TEST[s/^/PUT my-index\n/]
-
-[discrete]
-[[fix-red-yellow-cluster-status]]
-==== Fix a red or yellow cluster status
-
-A shard can become unassigned for several reasons. The following tips outline the
-most common causes and their solutions.
-
-**Re-enable shard allocation**
-
-You typically disable allocation during a <> or other
-cluster maintenance. If you forgot to re-enable allocation afterward, {es} will
-be unable to assign shards. To re-enable allocation, reset the
-`cluster.routing.allocation.enable` cluster setting.
-
-[source,console]
-----
-PUT _cluster/settings
-{
-  "persistent" : {
-    "cluster.routing.allocation.enable" : null
-  }
-}
-----
-
-**Recover lost nodes**
-
-Shards often become unassigned when a data node leaves the cluster. This can
-occur for several reasons, ranging from connectivity issues to hardware failure.
-After you resolve the issue and recover the node, it will rejoin the cluster.
-{es} will then automatically allocate any unassigned shards.
-
-To avoid wasting resources on temporary issues, {es} <> by one minute by default. If you've recovered a node and don't want
-to wait for the delay period, you can call the <> with no arguments to start the allocation process. The process runs
-asynchronously in the background.
-
-[source,console]
-----
-POST _cluster/reroute?metric=none
-----
-
-**Fix allocation settings**
-
-Misconfigured allocation settings can result in an unassigned primary shard.
-These settings include:
-
-* <> index settings
-* <> cluster settings
-* <> cluster settings
-
-To review your allocation settings, use the <> and <> APIs.
-
-[source,console]
-----
-GET my-index/_settings?flat_settings=true&include_defaults=true
-
-GET _cluster/settings?flat_settings=true&include_defaults=true
-----
-// TEST[s/^/PUT my-index\n/]
-
-You can change the settings using the <> and <> APIs.
-
-**Allocate or reduce replicas**
-
-To protect against hardware failure, {es} will not assign a replica to the same
-node as its primary shard. If no other data nodes are available to host the
-replica, it remains unassigned.
-To fix this, you can:
-
-* Add a data node to the same tier to host the replica.
-
-* Change the `index.number_of_replicas` index setting to reduce the number of
-replicas for each primary shard. We recommend keeping at least one replica per
-primary.
-
-[source,console]
-----
-PUT _settings
-{
-  "index.number_of_replicas": 1
-}
-----
-// TEST[s/^/PUT my-index\n/]
-
-**Free up or increase disk space**
-
-{es} uses a <> to ensure data
-nodes have enough disk space for incoming shards. By default, {es} does not
-allocate shards to nodes using more than 85% of disk space.
-
-To check the current disk space of your nodes, use the <>.
-
-[source,console]
-----
-GET _cat/allocation?v=true&h=node,shards,disk.*
-----
-
-If your nodes are running low on disk space, you have a few options:
-
-* Upgrade your nodes to increase disk space.
-
-* Delete unneeded indices to free up space. If you use {ilm-init}, you can
-update your lifecycle policy to use <> or add a delete phase. If you no longer need to search the data, you
-can use a <> to store it off-cluster.
-
-* If you no longer write to an index, use the <> or {ilm-init}'s <> to merge its
-segments into larger ones.
-+
-[source,console]
-----
-POST my-index/_forcemerge
-----
-// TEST[s/^/PUT my-index\n/]
-
-* If an index is read-only, use the <> or
-{ilm-init}'s <> to reduce its primary shard count.
-+
-[source,console]
-----
-POST my-index/_shrink/my-shrunken-index
-----
-// TEST[s/^/PUT my-index\n{"settings":{"index.number_of_shards":2,"blocks.write":true}}\n/]
-
-* If your node has a large disk capacity, you can increase the low disk
-watermark or set it to an explicit byte value.
-+
-[source,console]
-----
-PUT _cluster/settings
-{
-  "persistent": {
-    "cluster.routing.allocation.disk.watermark.low": "30gb"
-  }
-}
-----
-// TEST[s/"30gb"/null/]
-
-**Reduce JVM memory pressure**
-
-Shard allocation requires JVM heap memory. High JVM memory pressure can trigger
-<> that stop allocation and leave shards
-unassigned. See <>.
-
-**Recover data for a lost primary shard**
-
-If a node containing a primary shard is lost, {es} can typically replace it
-using a replica on another node. If you can't recover the node and replicas
-don't exist or are irrecoverable, you'll need to re-add the missing data from a
-<> or the original data source.
-
-WARNING: Only use this option if node recovery is no longer possible. This
-process allocates an empty primary shard. If the node later rejoins the cluster,
-{es} will overwrite its primary shard with data from this newer empty shard,
-resulting in data loss.
-
-Use the <> to manually allocate the
-unassigned primary shard to another data node in the same tier. Set
-`accept_data_loss` to `true`.
-
-[source,console]
-----
-POST _cluster/reroute?metric=none
-{
-  "commands": [
-    {
-      "allocate_empty_primary": {
-        "index": "my-index",
-        "shard": 0,
-        "node": "my-node",
-        "accept_data_loss": "true"
-      }
-    }
-  ]
-}
-----
-// TEST[s/^/PUT my-index\n/]
-// TEST[catch:bad_request]
-
-If you backed up the missing index data to a snapshot, use the
-<> to restore the individual index.
-Alternatively, you can index the missing data from the original data source.
-
-[discrete]
-[[rejected-requests]]
-=== Rejected requests
-
-When {es} rejects a request, it stops the operation and returns an error with a
-`429` response code. Rejected requests are commonly caused by:
-
-* A <>. A depleted `search` or `write`
-thread pool returns a `TOO_MANY_REQUESTS` error message.
-
-* A <>.
-
-* High <> that exceeds the
-<>.
-
-[discrete]
-[[check-rejected-tasks]]
-==== Check rejected tasks
-
-To check the number of rejected tasks for each thread pool, use the
-<>. A high ratio of `rejected` to
-`completed` tasks, particularly in the `search` and `write` thread pools, means
-{es} regularly rejects requests.
-
-[source,console]
-----
-GET /_cat/thread_pool?v=true&h=id,name,active,rejected,completed
-----
-
-[discrete]
-[[prevent-rejected-requests]]
-==== Prevent rejected requests
-
-**Fix high CPU and memory usage**
-
-If {es} regularly rejects requests and other tasks, your cluster likely has high
-CPU usage or high JVM memory pressure. For tips, see <> and
-<>.
-
-**Prevent circuit breaker errors**
-
-If you regularly trigger circuit breaker errors, see <>
-for tips on diagnosing and preventing them.
-
-[discrete]
-[[task-queue-backlog]]
-=== Task queue backlog
-
-A backlogged task queue can prevent tasks from completing and
-put the cluster into an unhealthy state.
-Resource constraints, a large number of tasks being triggered at once,
-and long-running tasks can all contribute to a backlogged task queue.
-
-[discrete]
-[[diagnose-task-queue-backlog]]
-==== Diagnose a task queue backlog
-
-**Check the thread pool status**
-
-A <> can result in <>.
-
-You can use the <> to
-see the number of active threads in each thread pool and
-how many tasks are queued, how many have been rejected, and how many have completed.
-
-[source,console]
-----
-GET /_cat/thread_pool?v&s=t,n&h=type,name,node_name,active,queue,rejected,completed
-----
-
-**Inspect the hot threads on each node**
-
-If a particular thread pool queue is backed up,
-you can periodically poll the <> API
-to determine if the thread has sufficient
-resources to progress and gauge how quickly it is progressing.
-
-[source,console]
-----
-GET /_nodes/hot_threads
-----
-
-**Look for long-running tasks**
-
-Long-running tasks can also cause a backlog.
-You can use the <> API to get information about the tasks that are running.
-Check the `running_time_in_nanos` to identify tasks that are taking an excessive amount of time to complete.
-
-[source,console]
-----
-GET /_tasks?filter_path=nodes.*.tasks
-----
-
-[discrete]
-[[resolve-task-queue-backlog]]
-==== Resolve a task queue backlog
-
-**Increase available resources**
-
-If tasks are progressing slowly and the queue is backing up,
-you might need to take steps to <>.
-
-In some cases, increasing the thread pool size might help.
-For example, the `force_merge` thread pool defaults to a single thread.
-Increasing the size to 2 might help reduce a backlog of force merge requests.
-
-**Cancel stuck tasks**
-
-If you find the active task's hot thread isn't progressing and there's a backlog,
-consider canceling the task.
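For reference, a stuck task can be canceled with the task management API. A minimal cancellation request looks like the following; the task ID shown here is a placeholder, so substitute an ID taken from the `_tasks` output above.

[source,console]
----
POST _tasks/oTUltX4IQMOUUVeiohTt8A:12345/_cancel
----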
From 3eeffebf75f40d7f3fff9d974b4a47619d8c7d3b Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Mon, 1 Apr 2024 13:33:22 -0700
Subject: [PATCH 035/264] AwaitsFix #106964

---
 .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
index 2d1b1cc9545db..e05cc92c8a76b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
@@ -20,6 +20,7 @@
 import org.apache.lucene.index.VectorSimilarityFunction;
 import org.apache.lucene.search.FieldExistsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.VectorUtil;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -64,6 +65,7 @@
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
+@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106964")
 public class DenseVectorFieldMapperTests extends MapperTestCase {
 
     private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0;

From 8fdd1aa1d0769c282780c8ea5f8f634bea7715dd Mon Sep 17 00:00:00 2001
From: Joe Gallo
Date: Mon, 1 Apr 2024 16:34:15 -0400
Subject: [PATCH 036/264] Refactor geoip database properties (#106960)

---
 .../elasticsearch/ingest/geoip/Database.java  | 182 ++++++++++++++++++
 .../ingest/geoip/GeoIpProcessor.java          | 156 ++-------------
 .../geoip/GeoIpProcessorFactoryTests.java     |  18 +-
 .../ingest/geoip/GeoIpProcessorTests.java     |   2 +-
 4 files changed, 211 insertions(+), 147 deletions(-)
 create mode 100644 modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java

diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
new file mode 100644
index 0000000000000..b1c9c99ace03e
--- /dev/null
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.ingest.geoip;
+
+import org.elasticsearch.core.Nullable;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+
+/**
+ * A high-level representation of a kind of geoip database that is supported by the {@link GeoIpProcessor}.
+ * <p>
+ * A database has a set of properties that are valid to use with it (see {@link Database#properties()}),
+ * as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}).
+ * <p>
+ * See especially {@link Database#getDatabase(String, String)} which is used to obtain instances of this class.
+ */
+enum Database {
+
+    City(
+        Set.of(
+            Property.IP,
+            Property.COUNTRY_ISO_CODE,
+            Property.COUNTRY_NAME,
+            Property.CONTINENT_NAME,
+            Property.REGION_ISO_CODE,
+            Property.REGION_NAME,
+            Property.CITY_NAME,
+            Property.TIMEZONE,
+            Property.LOCATION
+        ),
+        Set.of(
+            Property.CONTINENT_NAME,
+            Property.COUNTRY_NAME,
+            Property.COUNTRY_ISO_CODE,
+            Property.REGION_ISO_CODE,
+            Property.REGION_NAME,
+            Property.CITY_NAME,
+            Property.LOCATION
+        )
+    ),
+    Country(
+        Set.of(Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE),
+        Set.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE)
+    ),
+    Asn(
+        Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK),
+        Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK)
+    );
+
+    private static final String CITY_DB_SUFFIX = "-City";
+    private static final String COUNTRY_DB_SUFFIX = "-Country";
+    private static final String ASN_DB_SUFFIX = "-ASN";
+
+    /**
+     * Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is
+     * associated with that databaseType.
+     *
+     * @param databaseType the database type String from the metadata of the database file
+     * @param databaseFile the database file from which the database type was obtained
+     * @throws IllegalArgumentException if the databaseType is not associated with a Database instance
+     * @return the Database instance that is associated with the databaseType
+     */
+    public static Database getDatabase(final String databaseType, final String databaseFile) {
+        Database database = null;
+        if (databaseType != null) {
+            if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) {
+                database = Database.City;
+            } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) {
+                database = Database.Country;
+            } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) {
+                database = Database.Asn;
+            }
+        }
+
+        if (database == null) {
+            throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]");
+        }
+
+        return database;
+    }
+
+    private final Set<Property> properties;
+    private final Set<Property> defaultProperties;
+
+    Database(Set<Property> properties, Set<Property> defaultProperties) {
+        this.properties = properties;
+        this.defaultProperties = defaultProperties;
+    }
+
+    /**
+     * @return a set representing all the valid properties for this database
+     */
+    public Set<Property> properties() {
+        return properties;
+    }
+
+    /**
+     * @return a set representing the default properties for this database
+     */
+    public Set<Property> defaultProperties() {
+        return defaultProperties;
+    }
+
+    /**
+     * Parse the given list of property names.
+     *
+     * @param propertyNames a list of property names to parse, or null to use the default properties for this database
+     * @throws IllegalArgumentException if any of the property names are not valid
+     * @return a set of parsed and validated properties
+     */
+    public Set<Property> parseProperties(@Nullable final List<String> propertyNames) {
+        if (propertyNames != null) {
+            final Set<Property> parsedProperties = new HashSet<>();
+            for (String propertyName : propertyNames) {
+                parsedProperties.add(Property.parseProperty(this.properties, propertyName)); // n.b. this throws if a property is invalid
+            }
+            return Set.copyOf(parsedProperties);
+        } else {
+            // if propertyNames is null, then use the default properties
+            return this.defaultProperties;
+        }
+    }
+
+    /**
+     * High-level database 'properties' that represent information that can be extracted from a geoip database.
+     */
+    enum Property {
+
+        IP,
+        COUNTRY_ISO_CODE,
+        COUNTRY_NAME,
+        CONTINENT_NAME,
+        REGION_ISO_CODE,
+        REGION_NAME,
+        CITY_NAME,
+        TIMEZONE,
+        LOCATION,
+        ASN,
+        ORGANIZATION_NAME,
+        NETWORK;
+
+        /**
+         * Parses a string representation of a property into an actual Property instance. Not all properties that exist are
+         * valid for all kinds of databases, so this method validates the parsed value against the provided set of valid properties.
+         * <p>
+         * See {@link Database#parseProperties(List)} where this is used.
+         *
+         * @param validProperties the valid properties against which to validate the parsed property value
+         * @param value the string representation to parse
+         * @return a parsed, validated Property
+         * @throws IllegalArgumentException if the value does not parse as a Property or if the parsed Property is not
+         *                                  in the passed-in validProperties set
+         */
+        private static Property parseProperty(final Set<Property> validProperties, final String value) {
+            try {
+                Property property = valueOf(value.toUpperCase(Locale.ROOT));
+                if (validProperties.contains(property) == false) {
+                    throw new IllegalArgumentException("invalid");
+                }
+                return property;
+            } catch (IllegalArgumentException e) {
+                // put the properties in natural order before throwing so that we have reliable error messages -- this is a little
+                // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial
+                Property[] properties = validProperties.toArray(new Property[0]);
+                Arrays.sort(properties);
+                throw new IllegalArgumentException(
+                    "illegal property value [" + value + "]. valid values are " + Arrays.toString(properties)
+                );
+            }
+        }
+    }
+}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
index 18ca9599f183c..ea17338c25bbf 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
@@ -25,19 +25,16 @@
 import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.core.Assertions;
-import org.elasticsearch.core.Nullable;
 import org.elasticsearch.ingest.AbstractProcessor;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.ingest.geoip.Database.Property;
 
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Supplier;
@@ -54,9 +51,6 @@ public final class GeoIpProcessor extends AbstractProcessor {
         + "Elasticsearch no longer includes the default Maxmind geoip databases.
This setting will be removed in Elasticsearch 9.0"; public static final String TYPE = "geoip"; - private static final String CITY_DB_SUFFIX = "-City"; - private static final String COUNTRY_DB_SUFFIX = "-Country"; - private static final String ASN_DB_SUFFIX = "-ASN"; private final String field; private final Supplier isValid; @@ -167,18 +161,18 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) throws IOException { final String databaseType = geoIpDatabase.getDatabaseType(); - final InetAddress ipAddress = InetAddresses.forString(ip); - Map geoData; - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - geoData = retrieveCityGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - geoData = retrieveCountryGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); - } else { - throw new ElasticsearchParseException("Unsupported database type [" + databaseType + "]", new IllegalStateException()); + final Database database; + try { + database = Database.getDatabase(databaseType, databaseFile); + } catch (IllegalArgumentException e) { + throw new ElasticsearchParseException(e.getMessage(), e); } - return geoData; + final InetAddress ipAddress = InetAddresses.forString(ip); + return switch (database) { + case City -> retrieveCityGeoData(geoIpDatabase, ipAddress); + case Country -> retrieveCountryGeoData(geoIpDatabase, ipAddress); + case Asn -> retrieveAsnGeoData(geoIpDatabase, ipAddress); + }; } @Override @@ -382,21 +376,6 @@ public GeoIpDatabase get() throws IOException { } public static final class Factory implements Processor.Factory { - static final Set DEFAULT_CITY_PROPERTIES = Set.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.LOCATION - ); - static final Set DEFAULT_COUNTRY_PROPERTIES = Set.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE - ); - static final Set DEFAULT_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); private final GeoIpDatabaseProvider geoIpDatabaseProvider; @@ -443,21 +422,17 @@ public Processor create( } finally { geoIpDatabase.release(); } - if (databaseType == null - || (databaseType.endsWith(CITY_DB_SUFFIX) - || databaseType.endsWith(COUNTRY_DB_SUFFIX) - || databaseType.endsWith(ASN_DB_SUFFIX)) == false) { - throw newConfigurationException( - TYPE, - processorTag, - "database_file", - "Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]" - ); + + final Database database; + try { + database = Database.getDatabase(databaseType, databaseFile); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "database_file", e.getMessage()); } final Set properties; try { - properties = Property.parseProperties(databaseType, propertyNames); + properties = database.parseProperties(propertyNames); } catch (IllegalArgumentException e) { throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } @@ -485,99 +460,6 @@ public static boolean downloadDatabaseOnPipelineCreation(Map con } - enum Property { - - IP, - COUNTRY_ISO_CODE, - COUNTRY_NAME, - CONTINENT_NAME, - REGION_ISO_CODE, - REGION_NAME, - CITY_NAME, - TIMEZONE, - LOCATION, - ASN, - ORGANIZATION_NAME, - NETWORK; - - static 
final Set ALL_CITY_PROPERTIES = Set.of( - Property.IP, - Property.COUNTRY_ISO_CODE, - Property.COUNTRY_NAME, - Property.CONTINENT_NAME, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.TIMEZONE, - Property.LOCATION - ); - static final Set ALL_COUNTRY_PROPERTIES = Set.of( - Property.IP, - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE - ); - static final Set ALL_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); - - private static Property parseProperty(Set validProperties, String value) { - try { - Property property = valueOf(value.toUpperCase(Locale.ROOT)); - if (validProperties.contains(property) == false) { - throw new IllegalArgumentException("invalid"); - } - return property; - } catch (IllegalArgumentException e) { - // put the properties in natural order before throwing so that we have reliable error messages -- this is a little - // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial - Property[] properties = validProperties.toArray(new Property[0]); - Arrays.sort(properties); - throw new IllegalArgumentException( - "illegal property value [" + value + "]. valid values are " + Arrays.toString(properties) - ); - } - } - - /** - * Parse the given list of property names and validate them against the supplied databaseType. - * - * @param databaseType the type of database to use to validate property names - * @param propertyNames a list of property names to parse, or null to use the default properties for the associated databaseType - * @throws IllegalArgumentException if any of the property names are not valid, or if the databaseType is not valid - * @return a set of parsed and validated properties - */ - public static Set parseProperties(final String databaseType, @Nullable final List propertyNames) { - final Set validProperties; - final Set defaultProperties; - - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - validProperties = ALL_CITY_PROPERTIES; - defaultProperties = Factory.DEFAULT_CITY_PROPERTIES; - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - validProperties = ALL_COUNTRY_PROPERTIES; - defaultProperties = Factory.DEFAULT_COUNTRY_PROPERTIES; - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - validProperties = ALL_ASN_PROPERTIES; - defaultProperties = Factory.DEFAULT_ASN_PROPERTIES; - } else { - assert false : "Unsupported database type [" + databaseType + "]"; - throw new IllegalArgumentException("Unsupported database type [" + databaseType + "]"); - } - - final Set properties; - if (propertyNames != null) { - Set modifiableProperties = new HashSet<>(); - for (String propertyName : propertyNames) { - modifiableProperties.add(parseProperty(validProperties, propertyName)); // n.b. 
this throws if a property is invalid - } - properties = Set.copyOf(modifiableProperties); - } else { - // if propertyNames is null, then use the default properties for the databaseType - properties = defaultProperties; - } - return properties; - } - } - static class DatabaseUnavailableProcessor extends AbstractProcessor { private final String databaseName; diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 24f2df7e30d16..99330224451ca 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; +import org.elasticsearch.ingest.geoip.Database.Property; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -100,7 +100,7 @@ public void testBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.City.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -117,7 +117,7 @@ public void testSetIgnoreMissing() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.City.defaultProperties())); assertTrue(processor.isIgnoreMissing()); } @@ -135,7 +135,7 @@ public void testCountryBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.Country.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -153,7 +153,7 @@ public void testAsnBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-ASN")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_ASN_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.Asn.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -177,7 +177,7 @@ public void testBuildDbFile() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country")); - assertThat(processor.getProperties(), 
sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.Country.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -186,7 +186,7 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); - Set asnOnlyProperties = new HashSet<>(Property.ALL_ASN_PROPERTIES); + Set asnOnlyProperties = new HashSet<>(Database.Asn.properties()); asnOnlyProperties.remove(Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); config.put("properties", List.of(asnProperty)); @@ -206,7 +206,7 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-ASN.mmdb"); - Set cityOnlyProperties = new HashSet<>(Property.ALL_CITY_PROPERTIES); + Set cityOnlyProperties = new HashSet<>(Database.City.properties()); cityOnlyProperties.remove(Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", List.of(cityProperty)); @@ -251,7 +251,7 @@ public void testBuildFields() throws Exception { int counter = 0; int numFields = scaledRandomIntBetween(1, Property.values().length); - for (Property property : Property.ALL_CITY_PROPERTIES) { + for (Property property : Database.City.properties()) { properties.add(property); fieldNames.add(property.name().toLowerCase(Locale.ROOT)); if (++counter >= numFields) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 3114d24ee7571..b40845246deb3 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; +import org.elasticsearch.ingest.geoip.Database.Property; import org.elasticsearch.test.ESTestCase; import java.io.IOException; From f9d96ae72d6de78a884e79ef1639a92460df43cb Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 13:44:21 -0700 Subject: [PATCH 037/264] AwaitsFix #106900 --- .../org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 4726424ada5f2..bf16456c7476e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -228,6 +228,7 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) } @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106900") public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); From 
9077c5d420a0fb452e1e81c5e6b2777a06489067 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 14:04:56 -0700 Subject: [PATCH 038/264] AwaitsFix #106968 --- .../reservedstate/service/FileSettingsServiceTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 5968be34e985a..b309f10903d09 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -253,6 +253,7 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { deadThreadLatch.countDown(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public void testStopWorksIfProcessingDidntReturnYet() throws Exception { var spiedController = spy(controller); var service = new FileSettingsService(clusterService, spiedController, env); From 5cc4a56b1f57f6b8b3da4f43f8e088fe17cecd03 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 14:09:52 -0700 Subject: [PATCH 039/264] AwaitsFix #106939 --- .../rest-api-spec/test/esql/80_text.yml | 52 ++++++++++--------- 1 file changed, 28 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 329f835a42659..17470af049a45 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -54,8 +54,8 @@ setup: - match: { columns.4.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "IT Director", "IT Director", "Jenny", "foo bar"] } - - match: { values.1: [ 20, "Payroll Specialist", "Payroll Specialist", "John", "baz"] } + - match: { values.0: [ 10, "IT Director", "IT Director", "Jenny", "foo bar" ] } + - match: { values.1: [ 20, "Payroll Specialist", "Payroll Specialist", "John", "baz" ] } --- @@ -77,7 +77,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "like by text": @@ -98,7 +98,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "rlike by text": @@ -119,7 +119,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "IN on text": @@ -144,7 +144,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "IN on text and itself": @@ -169,8 +169,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } - - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz" ] } --- "NOT IN on text": @@ -195,7 +195,7 
@@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } --- "eval and filter text": @@ -216,7 +216,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "filter on text multi-field": @@ -237,7 +237,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } --- "like by multi-field text": @@ -258,7 +258,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "rlike by multi-field text": @@ -279,7 +279,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- @@ -301,8 +301,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } - - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } + - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar" ] } --- @@ -324,8 +324,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } - - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz" ] } --- "sort by text multi-field desc": @@ -346,8 +346,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } - - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } + - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar" ] } --- @@ -363,8 +363,8 @@ setup: - match: { columns.0.type: "keyword" } - length: { values: 2 } - - match: { values.0: [ "Jenny - IT Director"] } - - match: { values.1: [ "John - Payroll Specialist"] } + - match: { values.0: [ "Jenny - IT Director" ] } + - match: { values.1: [ "John - Payroll Specialist" ] } --- "split text": @@ -383,8 +383,8 @@ setup: - match: { columns.0.type: "keyword" } - length: { values: 2 } - - match: { values.0: [ ["foo", "bar"] ] } - - match: { values.1: [ "baz"] } + - match: { values.0: [ [ "foo", "bar" ] ] } + - match: { values.1: [ "baz" ] } --- @@ -521,7 +521,7 @@ setup: refresh: true body: - { "index": { } } - - { "emp_no": 10, "name": "Jenny", "job": "IT Director"} + - { "emp_no": 10, "name": "Jenny", "job": "IT Director" } - { "index": { } } - { "emp_no": 20, "name": "John", "job": "Payroll Specialist" } @@ -541,6 +541,10 @@ setup: --- values: + - skip: + version: all + reason: 'AwaitsFix https://github.com/elastic/elasticsearch/issues/106939' + - requires: cluster_features: esql.agg_values reason: "values is available in 8.14+" From 601176364e0b1bd9e141e5bc8e8a796d9ff99c63 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 1 Apr 2024 15:52:27 -0600 Subject: [PATCH 
040/264] Call out `monitor` privilege for index and component templates (#106970)

These can be retrieved using the `monitor` cluster privilege. This just
adds that note to the docs.
---
 docs/reference/indices/get-component-template.asciidoc | 2 +-
 docs/reference/indices/get-index-template.asciidoc     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/indices/get-component-template.asciidoc b/docs/reference/indices/get-component-template.asciidoc
index f3073406be2b1..f35192ca448db 100644
--- a/docs/reference/indices/get-component-template.asciidoc
+++ b/docs/reference/indices/get-component-template.asciidoc
@@ -51,7 +51,7 @@ GET /_component_template/template_1
 
 * If the {es} {security-features} are enabled, you must have the
 `manage_index_templates` or `manage` <<privileges-list-cluster,cluster
-privilege>> to use this API.
+privilege>> to update templates, or the `monitor` cluster privilege to retrieve templates.
 
 [[get-component-template-api-path-params]]
 ==== {api-path-parms-title}
diff --git a/docs/reference/indices/get-index-template.asciidoc b/docs/reference/indices/get-index-template.asciidoc
index 9ae8af6f8441b..2cde5adc8ae23 100644
--- a/docs/reference/indices/get-index-template.asciidoc
+++ b/docs/reference/indices/get-index-template.asciidoc
@@ -46,7 +46,7 @@ GET /_index_template/template_1
 
 * If the {es} {security-features} are enabled, you must have the
 `manage_index_templates` or `manage` <<privileges-list-cluster,cluster
-privilege>> to use this API.
+privilege>> to use this API, or the `monitor` cluster privilege to retrieve templates.
 
 [[get-template-api-path-params]]
 ==== {api-path-parms-title}

From 22ca5ecfd36e79cad967a129741400b7fecf2657 Mon Sep 17 00:00:00 2001
From: Lee Hinman
Date: Mon, 1 Apr 2024 16:19:59 -0600
Subject: [PATCH 041/264] Fix warning typo for test failure (#106971)

This had the wrong index patterns, which led to test failures.

Resolves #106965
---
 .../resources/rest-api-spec/test/data_stream/10_basic.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml
index 23f8715b5787f..6187c1a0dfed1 100644
--- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml
@@ -543,7 +543,7 @@ setup:
 
   - do:
       allowed_warnings:
-        - "index template [my-template4] has index patterns [failure-data-stream1, failure-data-stream2] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] will take precedence during new index creation"
+        - "index template [my-template4] has index patterns [failure-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] will take precedence during new index creation"
       indices.put_index_template:
        name: my-template4
        body:

From 393e644165181c114925ee179ca43fef02e99579 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Tue, 2 Apr 2024 00:44:44 +0200
Subject: [PATCH 042/264] Refactor more ActionListener.wrap to
 delegateFailureAndWrap (#106948)

Refactoring a couple more of these and inlining some listeners while at
it to save code/memory and get cleaner guarantees around not leaking
things.

Note: In two cases inlining makes it obvious that we can remove
spurious AtomicReference usage.
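For readers unfamiliar with the idiom, here is a minimal before/after
sketch of the transformation this patch applies throughout (illustrative
only; the String/Integer listener types and the `before`/`after` methods
are invented for the example and are not taken from this diff):

    import org.elasticsearch.action.ActionListener;

    class DelegateExample {
        // Before: ActionListener.wrap(...) allocates a fresh listener and
        // captures the outer listener twice, once per handler.
        static ActionListener<String> before(ActionListener<Integer> listener) {
            return ActionListener.wrap(s -> listener.onResponse(s.length()), listener::onFailure);
        }

        // After: delegateFailureAndWrap(...) forwards failures to the original
        // listener automatically and passes the delegate into the response
        // handler, so the lambda no longer needs to capture (or risk leaking)
        // the outer listener.
        static ActionListener<String> after(ActionListener<Integer> listener) {
            return listener.delegateFailureAndWrap((delegate, s) -> delegate.onResponse(s.length()));
        }
    }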
--- .../execution/search/PITAwareQueryClient.java | 2 +- .../TransportGetTrainedModelsAction.java | 154 +++++++++--------- .../ml/action/TransportMlMemoryAction.java | 46 +++--- .../InferencePipelineAggregationBuilder.java | 8 +- .../ml/datafeed/DatafeedContextProvider.java | 36 ++-- .../persistence/DatafeedConfigProvider.java | 20 ++- .../dataframe/DataFrameAnalyticsManager.java | 26 +-- .../xpack/ml/dataframe/DestinationIndex.java | 81 +++++---- .../DataFrameAnalyticsConfigProvider.java | 2 +- .../deployment/DeploymentManager.java | 10 +- .../retention/ExpiredAnnotationsRemover.java | 12 +- .../retention/ExpiredForecastsRemover.java | 12 +- .../ExpiredModelSnapshotsRemover.java | 23 ++- .../job/retention/ExpiredResultsRemover.java | 12 +- .../xpack/ml/process/MlMemoryTracker.java | 8 +- .../ExpiredAnnotationsRemoverTests.java | 1 + .../ExpiredModelSnapshotsRemoverTests.java | 1 + .../retention/ExpiredResultsRemoverTests.java | 1 + 18 files changed, 213 insertions(+), 242 deletions(-) diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java index befb2c7503515..cce3cdeb97961 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java @@ -125,7 +125,7 @@ private ActionListener pitListener(Function {}, ex -> {})); + close(ActionListener.noop()); } } ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index 78d030d454f0b..b250df8d5215f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; @@ -65,82 +64,6 @@ public TransportGetTrainedModelsAction( @Override protected void doExecute(Task task, Request request, ActionListener listener) { final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - - Response.Builder responseBuilder = Response.builder(); - - ActionListener> getModelDefinitionStatusListener = ActionListener.wrap(configs -> { - if (request.getIncludes().isIncludeDefinitionStatus() == false) { - listener.onResponse(responseBuilder.setModels(configs).build()); - return; - } - - assert configs.size() <= 1; - if (configs.isEmpty()) { - listener.onResponse(responseBuilder.setModels(configs).build()); - return; - } - - if (configs.get(0).getModelType() != TrainedModelType.PYTORCH) { - listener.onFailure(ExceptionsHelper.badRequestException("Definition status is only relevant to PyTorch model types")); - return; - } - - TransportStartTrainedModelDeploymentAction.checkFullModelDefinitionIsPresent( - new OriginSettingClient(client, ML_ORIGIN), - configs.get(0), - false, // missing docs are not an error - null, // if download is in progress, don't wait for it to 
complete - ActionListener.wrap(modelIdAndLength -> { - configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); - listener.onResponse(responseBuilder.setModels(configs).build()); - }, listener::onFailure) - ); - }, listener::onFailure); - - ActionListener>>> idExpansionListener = ActionListener.wrap(totalAndIds -> { - responseBuilder.setTotalCount(totalAndIds.v1()); - - if (totalAndIds.v2().isEmpty()) { - listener.onResponse(responseBuilder.build()); - return; - } - - if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) { - listener.onFailure(ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)); - return; - } - - if (request.getIncludes().isIncludeDefinitionStatus() && totalAndIds.v2().size() > 1) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Getting the model download status is not supported when getting more than one model" - ) - ); - return; - } - - if (request.getIncludes().isIncludeModelDefinition()) { - Map.Entry> modelIdAndAliases = totalAndIds.v2().entrySet().iterator().next(); - provider.getTrainedModel( - modelIdAndAliases.getKey(), - modelIdAndAliases.getValue(), - request.getIncludes(), - parentTaskId, - ActionListener.wrap( - config -> getModelDefinitionStatusListener.onResponse(Collections.singletonList(config)), - getModelDefinitionStatusListener::onFailure - ) - ); - } else { - provider.getTrainedModels( - totalAndIds.v2(), - request.getIncludes(), - request.isAllowNoResources(), - parentTaskId, - getModelDefinitionStatusListener - ); - } - }, listener::onFailure); provider.expandIds( request.getResourceId(), request.isAllowNoResources(), @@ -149,7 +72,82 @@ protected void doExecute(Task task, Request request, ActionListener li ModelAliasMetadata.fromState(clusterService.state()), parentTaskId, Collections.emptySet(), - idExpansionListener + listener.delegateFailureAndWrap((delegate, totalAndIds) -> { + Response.Builder responseBuilder = Response.builder(); + responseBuilder.setTotalCount(totalAndIds.v1()); + + if (totalAndIds.v2().isEmpty()) { + delegate.onResponse(responseBuilder.build()); + return; + } + + if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) { + delegate.onFailure(ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)); + return; + } + + if (request.getIncludes().isIncludeDefinitionStatus() && totalAndIds.v2().size() > 1) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Getting the model download status is not supported when getting more than one model" + ) + ); + return; + } + + ActionListener> getModelDefinitionStatusListener = delegate.delegateFailureAndWrap( + (delegate2, configs) -> { + if (request.getIncludes().isIncludeDefinitionStatus() == false) { + delegate2.onResponse(responseBuilder.setModels(configs).build()); + return; + } + + assert configs.size() <= 1; + if (configs.isEmpty()) { + delegate2.onResponse(responseBuilder.setModels(configs).build()); + return; + } + + if (configs.get(0).getModelType() != TrainedModelType.PYTORCH) { + delegate2.onFailure( + ExceptionsHelper.badRequestException("Definition status is only relevant to PyTorch model types") + ); + return; + } + + TransportStartTrainedModelDeploymentAction.checkFullModelDefinitionIsPresent( + new OriginSettingClient(client, ML_ORIGIN), + configs.get(0), + false, // missing docs are not an error + null, // if download is in progress, don't wait for it to complete + delegate2.delegateFailureAndWrap((l, 
modelIdAndLength) -> { + configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); + l.onResponse(responseBuilder.setModels(configs).build()); + }) + ); + } + ); + if (request.getIncludes().isIncludeModelDefinition()) { + Map.Entry> modelIdAndAliases = totalAndIds.v2().entrySet().iterator().next(); + provider.getTrainedModel( + modelIdAndAliases.getKey(), + modelIdAndAliases.getValue(), + request.getIncludes(), + parentTaskId, + getModelDefinitionStatusListener.delegateFailureAndWrap( + (l, config) -> l.onResponse(Collections.singletonList(config)) + ) + ); + } else { + provider.getTrainedModels( + totalAndIds.v2(), + request.getIncludes(), + request.isAllowNoResources(), + parentTaskId, + getModelDefinitionStatusListener + ); + } + }) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java index 0265faaeeb1d6..3223a7c7863f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java @@ -95,38 +95,34 @@ protected void masterOperation( ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); - ActionListener nodeStatsListener = ActionListener.wrap(nodesStatsResponse -> { - TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( - nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) - ).timeout(request.timeout()); - - parentTaskClient.execute( - TrainedModelCacheInfoAction.INSTANCE, - trainedModelCacheInfoRequest, - ActionListener.wrap( - trainedModelCacheInfoResponse -> handleResponses( - state, - clusterSettings, - nodesStatsResponse, - trainedModelCacheInfoResponse, - listener - ), - listener::onFailure - ) - ); - }, listener::onFailure); - // Next get node stats related to the OS and JVM - ActionListener memoryTrackerRefreshListener = ActionListener.wrap( - r -> parentTaskClient.admin() + ActionListener memoryTrackerRefreshListener = listener.delegateFailureAndWrap( + (delegate, r) -> parentTaskClient.admin() .cluster() .prepareNodesStats(nodeIds) .clear() .setOs(true) .setJvm(true) .setTimeout(request.timeout()) - .execute(nodeStatsListener), - listener::onFailure + .execute(delegate.delegateFailureAndWrap((delegate2, nodesStatsResponse) -> { + TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( + nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) + ).timeout(request.timeout()); + + parentTaskClient.execute( + TrainedModelCacheInfoAction.INSTANCE, + trainedModelCacheInfoRequest, + delegate2.delegateFailureAndWrap( + (l, trainedModelCacheInfoResponse) -> handleResponses( + state, + clusterSettings, + nodesStatsResponse, + trainedModelCacheInfoResponse, + l + ) + ) + ); + })) ); // If the memory tracker has never been refreshed, do that first diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java index cac9d88256696..16a0f85028b85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java @@ -289,17 +289,17 @@ public InferencePipelineAggregationBuilder rewrite(QueryRewriteContext context) privRequest.indexPrivileges(new RoleDescriptor.IndicesPrivileges[] {}); privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[] {}); - ActionListener privResponseListener = ActionListener.wrap(r -> { + ActionListener privResponseListener = listener.delegateFailureAndWrap((l, r) -> { if (r.isCompleteMatch()) { - modelLoadAction.accept(client, listener); + modelLoadAction.accept(client, l); } else { - listener.onFailure( + l.onFailure( Exceptions.authorizationError( "user [" + username + "] does not have the privilege to get trained models so cannot use ml inference" ) ); } - }, listener::onFailure); + }); client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java index defbc01a89d48..1e4db8aff4559 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java @@ -9,15 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; -import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; -import org.elasticsearch.xpack.ml.job.persistence.RestartTimeInfo; import java.util.Objects; -import java.util.function.Consumer; public class DatafeedContextProvider { @@ -38,27 +34,19 @@ public DatafeedContextProvider( public void buildDatafeedContext(String datafeedId, ActionListener listener) { DatafeedContext.Builder context = DatafeedContext.builder(); - Consumer timingStatsListener = timingStats -> { - context.setTimingStats(timingStats); - listener.onResponse(context.build()); - }; - - ActionListener restartTimeInfoListener = ActionListener.wrap(restartTimeInfo -> { - context.setRestartTimeInfo(restartTimeInfo); - resultsProvider.datafeedTimingStats(context.getJob().getId(), timingStatsListener, listener::onFailure); - }, listener::onFailure); - - ActionListener jobConfigListener = ActionListener.wrap(jobBuilder -> { - context.setJob(jobBuilder.build()); - resultsProvider.getRestartTimeInfo(jobBuilder.getId(), restartTimeInfoListener); - }, listener::onFailure); - - ActionListener datafeedListener = ActionListener.wrap(datafeedConfigBuilder -> { + datafeedConfigProvider.getDatafeedConfig(datafeedId, null, listener.delegateFailureAndWrap((delegate1, datafeedConfigBuilder) -> { DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); context.setDatafeedConfig(datafeedConfig); - jobConfigProvider.getJob(datafeedConfig.getJobId(), null, jobConfigListener); - }, listener::onFailure); - - datafeedConfigProvider.getDatafeedConfig(datafeedId, null, datafeedListener); + jobConfigProvider.getJob(datafeedConfig.getJobId(), null, delegate1.delegateFailureAndWrap((delegate2, jobBuilder) -> { + context.setJob(jobBuilder.build()); + 
resultsProvider.getRestartTimeInfo(jobBuilder.getId(), delegate2.delegateFailureAndWrap((delegate3, restartTimeInfo) -> { + context.setRestartTimeInfo(restartTimeInfo); + resultsProvider.datafeedTimingStats(context.getJob().getId(), timingStats -> { + context.setTimingStats(timingStats); + delegate3.onResponse(context.build()); + }, delegate3::onFailure); + })); + })); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index fbabc9903c4cc..20da61a3d6910 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -351,14 +351,20 @@ public void onResponse(GetResponse getResponse) { return; } - ActionListener validatedListener = ActionListener.wrap( - ok -> indexUpdatedConfig(updatedConfig, seqNo, primaryTerm, ActionListener.wrap(indexResponse -> { - assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; - delegate.onResponse(updatedConfig); - }, delegate::onFailure)), - delegate::onFailure + validator.accept( + updatedConfig, + delegate.delegateFailureAndWrap( + (l, ok) -> indexUpdatedConfig( + updatedConfig, + seqNo, + primaryTerm, + l.delegateFailureAndWrap((ll, indexResponse) -> { + assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; + ll.onResponse(updatedConfig); + }) + ) + ) ); - validator.accept(updatedConfig, validatedListener); } } ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index d370e8af52549..203474a3c9d0a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -169,29 +169,21 @@ public void execute(DataFrameAnalyticsTask task, ClusterState clusterState, Time }, task::setFailed); - // Retrieve configuration - ActionListener statsIndexListener = configListener.delegateFailureAndWrap( - (l, aBoolean) -> configProvider.get(task.getParams().getId(), l) - ); - - // Make sure the stats index and alias exist - ActionListener stateAliasListener = ActionListener.wrap( - aBoolean -> createStatsIndexAndUpdateMappingsIfNecessary( - new ParentTaskAssigningClient(client, task.getParentTaskId()), - clusterState, - masterNodeTimeout, - statsIndexListener - ), - configListener::onFailure - ); - // Make sure the state index and alias exist AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessaryAndWaitForYellow( new ParentTaskAssigningClient(client, task.getParentTaskId()), clusterState, expressionResolver, masterNodeTimeout, - stateAliasListener + configListener.delegateFailureAndWrap( + (delegate, aBoolean) -> createStatsIndexAndUpdateMappingsIfNecessary( + new ParentTaskAssigningClient(client, task.getParentTaskId()), + clusterState, + masterNodeTimeout, + // Retrieve configuration + delegate.delegateFailureAndWrap((l, ignored) -> configProvider.get(task.getParams().getId(), l)) + ) + ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java index 280984feab4d4..ed12f54ab86b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java @@ -46,7 +46,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -110,18 +109,22 @@ public static void createDestinationIndex( String[] destIndexAllowedSettings, ActionListener listener ) { - ActionListener createIndexRequestListener = ActionListener.wrap(createIndexRequest -> { - ClientHelper.executeWithHeadersAsync( - analyticsConfig.getHeaders(), - ClientHelper.ML_ORIGIN, - client, - TransportCreateIndexAction.TYPE, - createIndexRequest, - listener - ); - }, listener::onFailure); - - prepareCreateIndexRequest(client, clock, analyticsConfig, destIndexAllowedSettings, createIndexRequestListener); + prepareCreateIndexRequest( + client, + clock, + analyticsConfig, + destIndexAllowedSettings, + listener.delegateFailureAndWrap( + (l, createIndexRequest) -> ClientHelper.executeWithHeadersAsync( + analyticsConfig.getHeaders(), + ClientHelper.ML_ORIGIN, + client, + TransportCreateIndexAction.TYPE, + createIndexRequest, + l + ) + ) + ); } private static void prepareCreateIndexRequest( @@ -131,30 +134,6 @@ private static void prepareCreateIndexRequest( String[] destIndexAllowedSettings, ActionListener listener ) { - AtomicReference settingsHolder = new AtomicReference<>(); - AtomicReference mappingsHolder = new AtomicReference<>(); - - ActionListener fieldCapabilitiesListener = listener.delegateFailureAndWrap( - (l, fieldCapabilitiesResponse) -> l.onResponse( - createIndexRequest(clock, config, settingsHolder.get(), mappingsHolder.get(), fieldCapabilitiesResponse) - ) - ); - - ActionListener mappingsListener = ActionListener.wrap(mappings -> { - mappingsHolder.set(mappings); - getFieldCapsForRequiredFields(client, config, fieldCapabilitiesListener); - }, listener::onFailure); - - ActionListener settingsListener = ActionListener.wrap(settings -> { - settingsHolder.set(settings); - MappingsMerger.mergeMappings(client, config.getHeaders(), config.getSource(), mappingsListener); - }, listener::onFailure); - - ActionListener getSettingsResponseListener = ActionListener.wrap( - settingsResponse -> settingsListener.onResponse(settings(settingsResponse, destIndexAllowedSettings)), - listener::onFailure - ); - GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(config.getSource().getIndex()) .indicesOptions(IndicesOptions.lenientExpandOpen()) .names(PRESERVED_SETTINGS); @@ -164,7 +143,25 @@ private static void prepareCreateIndexRequest( client, GetSettingsAction.INSTANCE, getSettingsRequest, - getSettingsResponseListener + listener.delegateFailureAndWrap((delegate, settingsResponse) -> { + final Settings settings = settings(settingsResponse, destIndexAllowedSettings); + MappingsMerger.mergeMappings( + client, + config.getHeaders(), + config.getSource(), + delegate.delegateFailureAndWrap( + (l, mappings) -> getFieldCapsForRequiredFields( + client, + config, + l.delegateFailureAndWrap( + (ll, fieldCapabilitiesResponse) -> ll.onResponse( + createIndexRequest(clock, config, settings, mappings, fieldCapabilitiesResponse) + ) + ) + ) + ) + ); + }) ); } @@ -355,7 +352,7 @@ public static void 
updateMappingsToDestIndex( // Verify that the results field does not exist in the dest index checkResultsFieldIsNotPresentInProperties(config, destPropertiesAsMap); - ActionListener fieldCapabilitiesListener = ActionListener.wrap(fieldCapabilitiesResponse -> { + getFieldCapsForRequiredFields(client, config, listener.delegateFailureAndWrap((delegate, fieldCapabilitiesResponse) -> { Map addedMappings = new HashMap<>(); // Determine mappings to be added to the destination index @@ -374,11 +371,9 @@ public static void updateMappingsToDestIndex( client, TransportPutMappingAction.TYPE, putMappingRequest, - listener + delegate ); - }, listener::onFailure); - - getFieldCapsForRequiredFields(client, config, fieldCapabilitiesListener); + })); } private static void checkResultsFieldIsNotPresentInProperties(DataFrameAnalyticsConfig config, Map properties) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 8c7d490f37787..47071c80b90ee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -142,7 +142,7 @@ private void exists(String jobId, ActionListener listener) { private void deleteLeftOverDocs(DataFrameAnalyticsConfig config, TimeValue timeout, ActionListener listener) { DataFrameAnalyticsDeleter deleter = new DataFrameAnalyticsDeleter(client, auditor); - deleter.deleteAllDocuments(config, timeout, ActionListener.wrap(r -> listener.onResponse(r), e -> { + deleter.deleteAllDocuments(config, timeout, ActionListener.wrap(listener::onResponse, e -> { if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { // This is expected listener.onResponse(AcknowledgedResponse.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 17b931d971188..9187969fc25a4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -424,7 +424,7 @@ public void clearCache(TrainedModelDeploymentTask task, TimeValue timeout, Actio timeout, processContext, threadPool, - ActionListener.wrap(b -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, b) -> l.onResponse(AcknowledgedResponse.TRUE)) ); executePyTorchAction(processContext, PriorityProcessWorkerExecutorService.RequestPriority.HIGHEST, controlMessageAction); @@ -533,18 +533,18 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene startTime = Instant.now(); logger.debug("[{}] process started", task.getDeploymentId()); try { - loadModel(modelLocation, ActionListener.wrap(success -> { + loadModel(modelLocation, loadedListener.delegateFailureAndWrap((delegate, success) -> { if (isStopped) { logger.debug("[{}] model loaded but process is stopped", task.getDeploymentId()); killProcessIfPresent(); - loadedListener.onFailure(new IllegalStateException("model loaded but process is stopped")); + delegate.onFailure(new 
IllegalStateException("model loaded but process is stopped")); return; } logger.debug("[{}] model loaded, starting priority process worker thread", task.getDeploymentId()); startPriorityProcessWorker(); - loadedListener.onResponse(success); - }, loadedListener::onFailure)); + delegate.onResponse(success); + })); } catch (Exception e) { loadedListener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java index 917d5881ae130..050d01198b910 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java @@ -131,18 +131,18 @@ private static DeleteByQueryRequest createDBQRequest(Job job, float requestsPerS @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - latestBucketTime(client, getParentTaskId(), jobId, ActionListener.wrap(latestTime -> { + latestBucketTime(client, getParentTaskId(), jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } private void auditAnnotationsWereDeleted(String jobId, long cutoffEpochMs) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 677e71b304cb9..886c19a65a4d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -77,15 +77,9 @@ public void remove(float requestsPerSec, ActionListener listener, Boole LOGGER.debug("Removing forecasts that expire before [{}]", cutoffEpochMs); ActionListener forecastStatsHandler = ActionListener.wrap( searchResponse -> deleteForecasts(searchResponse, requestsPerSec, listener, isTimedOutSupplier), - e -> { - listener.onFailure( - new ElasticsearchStatusException( - "An error occurred while searching forecasts to delete", - RestStatus.TOO_MANY_REQUESTS, - e - ) - ); - } + e -> listener.onFailure( + new ElasticsearchStatusException("An error occurred while searching forecasts to delete", RestStatus.TOO_MANY_REQUESTS, e) + ) ); SearchSourceBuilder source = new SearchSourceBuilder(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 27bd3c926d944..cbd505c293c86 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -100,19 +100,18 @@ Long getRetentionDays(Job job) { @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - - latestSnapshotTimeStamp(jobId, ActionListener.wrap(latestTime -> { + latestSnapshotTimeStamp(jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } private void latestSnapshotTimeStamp(String jobId, ActionListener listener) { @@ -135,22 +134,22 @@ private void latestSnapshotTimeStamp(String jobId, ActionListener listener searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); searchRequest.setParentTask(getParentTaskId()); - client.search(searchRequest, ActionListener.wrap(response -> { + client.search(searchRequest, listener.delegateFailureAndWrap((delegate, response) -> { SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) { // no snapshots found - listener.onResponse(null); + delegate.onResponse(null); } else { String timestamp = stringFieldValueOrNull(hits[0], ModelSnapshot.TIMESTAMP.getPreferredName()); if (timestamp == null) { LOGGER.warn("Model snapshot document [{}] has a null timestamp field", hits[0].getId()); - listener.onResponse(null); + delegate.onResponse(null); } else { long timestampMs = TimeUtils.parseToEpochMs(timestamp); - listener.onResponse(timestampMs); + delegate.onResponse(timestampMs); } } - }, listener::onFailure)); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 35e16b9fa8b88..be0bb53d454fe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -165,18 +165,18 @@ private static DeleteByQueryRequest createDBQRequest(Job job, float requestsPerS @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - latestBucketTime(client, getParentTaskId(), jobId, ActionListener.wrap(latestTime -> { + latestBucketTime(client, getParentTaskId(), jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } static 
void latestBucketTime(OriginSettingClient client, TaskId parentTaskId, String jobId, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 3f502c4d95cc9..d1f1d0d506c85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -366,7 +366,7 @@ public void refreshAnomalyDetectorJobMemoryAndAllOthers(String jobId, ActionList refresh( clusterService.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE), Collections.singleton(jobId), - ActionListener.wrap(aVoid -> refreshAnomalyDetectorJobMemory(jobId, listener), listener::onFailure) + listener.delegateFailureAndWrap((l, aVoid) -> refreshAnomalyDetectorJobMemory(jobId, l)) ); } @@ -503,15 +503,15 @@ private void refreshAllDataFrameAnalyticsJobTasks( .map(task -> ((StartDataFrameAnalyticsAction.TaskParams) task.getParams()).getId()) .collect(Collectors.toSet()); - configProvider.getConfigsForJobsWithTasksLeniently(jobsWithTasks, ActionListener.wrap(analyticsConfigs -> { + configProvider.getConfigsForJobsWithTasksLeniently(jobsWithTasks, listener.delegateFailureAndWrap((delegate, analyticsConfigs) -> { for (DataFrameAnalyticsConfig analyticsConfig : analyticsConfigs) { memoryRequirementByDataFrameAnalyticsJob.put( analyticsConfig.getId(), analyticsConfig.getModelMemoryLimit().getBytes() + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes() ); } - listener.onResponse(null); - }, listener::onFailure)); + delegate.onResponse(null); + })); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java index 39f1ead7e24e0..59e0093abfba9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java @@ -144,6 +144,7 @@ public void testCalcCutoffEpochMs() { List jobs = Collections.singletonList(JobTests.buildJobBuilder(jobId).setResultsRetentionDays(1L).build()); ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredAnnotationsRemover(jobs.iterator()).calcCutoffEpochMs(jobId, 1L, cutoffListener); long dayInMills = 60 * 60 * 24 * 1000; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java index 5b3168a425029..98dc3bf3ea84b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java @@ -271,6 +271,7 @@ public void testCalcCutoffEpochMs() { long retentionDays = 3L; ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredModelSnapshotsRemover(Collections.emptyIterator()).calcCutoffEpochMs("job-1", retentionDays, cutoffListener); long dayInMills = 60 * 60 * 24 
* 1000; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java index 4dbb4eda07b0a..9a768b7f635bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java @@ -144,6 +144,7 @@ public void testCalcCutoffEpochMs() { List jobs = Collections.singletonList(JobTests.buildJobBuilder(jobId).setResultsRetentionDays(1L).build()); ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredResultsRemover(jobs.iterator()).calcCutoffEpochMs(jobId, 1L, cutoffListener); long dayInMills = 60 * 60 * 24 * 1000; From 4ddb6406cdbe613ca44cea200675ef0ac2407186 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Apr 2024 02:41:22 +0200 Subject: [PATCH 043/264] Remove o.e.painless.toxcontent.UserTreeToXContent (#106935) This was never used -> remove it and its test. --- .../toxcontent/UserTreeToXContent.java | 685 ------------------ .../painless/ToXContentTests.java | 124 ---- 2 files changed, 809 deletions(-) delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java delete mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java deleted file mode 100644 index 2756419e68e16..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java +++ /dev/null @@ -1,685 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.painless.toxcontent; - -import org.elasticsearch.painless.Operation; -import org.elasticsearch.painless.node.AExpression; -import org.elasticsearch.painless.node.ANode; -import org.elasticsearch.painless.node.EAssignment; -import org.elasticsearch.painless.node.EBinary; -import org.elasticsearch.painless.node.EBooleanComp; -import org.elasticsearch.painless.node.EBooleanConstant; -import org.elasticsearch.painless.node.EBrace; -import org.elasticsearch.painless.node.ECall; -import org.elasticsearch.painless.node.ECallLocal; -import org.elasticsearch.painless.node.EComp; -import org.elasticsearch.painless.node.EConditional; -import org.elasticsearch.painless.node.EDecimal; -import org.elasticsearch.painless.node.EDot; -import org.elasticsearch.painless.node.EElvis; -import org.elasticsearch.painless.node.EExplicit; -import org.elasticsearch.painless.node.EFunctionRef; -import org.elasticsearch.painless.node.EInstanceof; -import org.elasticsearch.painless.node.ELambda; -import org.elasticsearch.painless.node.EListInit; -import org.elasticsearch.painless.node.EMapInit; -import org.elasticsearch.painless.node.ENewArray; -import org.elasticsearch.painless.node.ENewArrayFunctionRef; -import org.elasticsearch.painless.node.ENewObj; -import org.elasticsearch.painless.node.ENull; -import org.elasticsearch.painless.node.ENumeric; -import org.elasticsearch.painless.node.ERegex; -import org.elasticsearch.painless.node.EString; -import org.elasticsearch.painless.node.ESymbol; -import org.elasticsearch.painless.node.EUnary; -import org.elasticsearch.painless.node.SBlock; -import org.elasticsearch.painless.node.SBreak; -import org.elasticsearch.painless.node.SCatch; -import org.elasticsearch.painless.node.SClass; -import org.elasticsearch.painless.node.SContinue; -import org.elasticsearch.painless.node.SDeclBlock; -import org.elasticsearch.painless.node.SDeclaration; -import org.elasticsearch.painless.node.SDo; -import org.elasticsearch.painless.node.SEach; -import org.elasticsearch.painless.node.SExpression; -import org.elasticsearch.painless.node.SFor; -import org.elasticsearch.painless.node.SFunction; -import org.elasticsearch.painless.node.SIf; -import org.elasticsearch.painless.node.SIfElse; -import org.elasticsearch.painless.node.SReturn; -import org.elasticsearch.painless.node.SThrow; -import org.elasticsearch.painless.node.STry; -import org.elasticsearch.painless.node.SWhile; -import org.elasticsearch.painless.phase.UserTreeBaseVisitor; -import org.elasticsearch.painless.symbol.Decorator.Condition; -import org.elasticsearch.painless.symbol.Decorator.Decoration; -import org.elasticsearch.painless.symbol.ScriptScope; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Serialize the user tree - */ -public class UserTreeToXContent extends UserTreeBaseVisitor { - public final XContentBuilderWrapper builder; - - public UserTreeToXContent(XContentBuilder builder) { - this.builder = new XContentBuilderWrapper(Objects.requireNonNull(builder)); - } - - public UserTreeToXContent() { - this.builder = new XContentBuilderWrapper(); - } - - static final class Fields { - static final String NODE = "node"; - static final String LOCATION = "location"; - static final String LEFT = "left"; - static final String RIGHT = "right"; - static final String BLOCK = "block"; - static final String CONDITION = 
"condition"; - static final String TYPE = "type"; - static final String SYMBOL = "symbol"; - static final String DECORATIONS = "decorations"; - static final String CONDITIONS = "conditions"; - } - - @Override - public void visitClass(SClass userClassNode, ScriptScope scope) { - start(userClassNode); - - builder.field("source", scope.getScriptSource()); - builder.startArray("functions"); - userClassNode.visitChildren(this, scope); - builder.endArray(); - - end(userClassNode, scope); - } - - @Override - public void visitFunction(SFunction userFunctionNode, ScriptScope scope) { - start(userFunctionNode); - - builder.field("name", userFunctionNode.getFunctionName()); - builder.field("returns", userFunctionNode.getReturnCanonicalTypeName()); - if (userFunctionNode.getParameterNames().isEmpty() == false) { - builder.field("parameters", userFunctionNode.getParameterNames()); - } - if (userFunctionNode.getCanonicalTypeNameParameters().isEmpty() == false) { - builder.field("parameterTypes", userFunctionNode.getCanonicalTypeNameParameters()); - } - builder.field("isInternal", userFunctionNode.isInternal()); - builder.field("isStatic", userFunctionNode.isStatic()); - builder.field("isSynthetic", userFunctionNode.isSynthetic()); - builder.field("isAutoReturnEnabled", userFunctionNode.isAutoReturnEnabled()); - - builder.startArray(Fields.BLOCK); - userFunctionNode.visitChildren(this, scope); - builder.endArray(); - - end(userFunctionNode, scope); - } - - @Override - public void visitBlock(SBlock userBlockNode, ScriptScope scope) { - start(userBlockNode); - - builder.startArray("statements"); - userBlockNode.visitChildren(this, scope); - builder.endArray(); - - end(userBlockNode, scope); - } - - @Override - public void visitIf(SIf userIfNode, ScriptScope scope) { - start(userIfNode); - - builder.startArray(Fields.CONDITION); - userIfNode.getConditionNode().visit(this, scope); - builder.endArray(); - - block("ifBlock", userIfNode.getIfBlockNode(), scope); - - end(userIfNode, scope); - } - - @Override - public void visitIfElse(SIfElse userIfElseNode, ScriptScope scope) { - start(userIfElseNode); - - builder.startArray(Fields.CONDITION); - userIfElseNode.getConditionNode().visit(this, scope); - builder.endArray(); - - block("ifBlock", userIfElseNode.getIfBlockNode(), scope); - block("elseBlock", userIfElseNode.getElseBlockNode(), scope); - - end(userIfElseNode, scope); - } - - @Override - public void visitWhile(SWhile userWhileNode, ScriptScope scope) { - start(userWhileNode); - loop(userWhileNode.getConditionNode(), userWhileNode.getBlockNode(), scope); - end(userWhileNode, scope); - } - - @Override - public void visitDo(SDo userDoNode, ScriptScope scope) { - start(userDoNode); - loop(userDoNode.getConditionNode(), userDoNode.getBlockNode(), scope); - end(userDoNode, scope); - } - - @Override - public void visitFor(SFor userForNode, ScriptScope scope) { - start(userForNode); - - ANode initializerNode = userForNode.getInitializerNode(); - builder.startArray("initializer"); - if (initializerNode != null) { - initializerNode.visit(this, scope); - } - builder.endArray(); - - builder.startArray("condition"); - AExpression conditionNode = userForNode.getConditionNode(); - if (conditionNode != null) { - conditionNode.visit(this, scope); - } - builder.endArray(); - - builder.startArray("afterthought"); - AExpression afterthoughtNode = userForNode.getAfterthoughtNode(); - if (afterthoughtNode != null) { - afterthoughtNode.visit(this, scope); - } - builder.endArray(); - - block(userForNode.getBlockNode(), scope); - 
- end(userForNode, scope); - } - - @Override - public void visitEach(SEach userEachNode, ScriptScope scope) { - start(userEachNode); - - builder.field(Fields.TYPE, userEachNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userEachNode.getSymbol()); - - builder.startArray("iterable"); - userEachNode.getIterableNode().visitChildren(this, scope); - builder.endArray(); - - block(userEachNode.getBlockNode(), scope); - - end(userEachNode, scope); - } - - @Override - public void visitDeclBlock(SDeclBlock userDeclBlockNode, ScriptScope scope) { - start(userDeclBlockNode); - - builder.startArray("declarations"); - userDeclBlockNode.visitChildren(this, scope); - builder.endArray(); - - end(userDeclBlockNode, scope); - } - - @Override - public void visitDeclaration(SDeclaration userDeclarationNode, ScriptScope scope) { - start(userDeclarationNode); - - builder.field(Fields.TYPE, userDeclarationNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userDeclarationNode.getSymbol()); - - builder.startArray("value"); - userDeclarationNode.visitChildren(this, scope); - builder.endArray(); - - end(userDeclarationNode, scope); - } - - @Override - public void visitReturn(SReturn userReturnNode, ScriptScope scope) { - start(userReturnNode); - - builder.startArray("value"); - userReturnNode.visitChildren(this, scope); - builder.endArray(); - - end(userReturnNode, scope); - } - - @Override - public void visitExpression(SExpression userExpressionNode, ScriptScope scope) { - start(userExpressionNode); - - builder.startArray("statement"); - userExpressionNode.visitChildren(this, scope); - builder.endArray(); - - end(userExpressionNode, scope); - } - - @Override - public void visitTry(STry userTryNode, ScriptScope scope) { - start(userTryNode); - - block(userTryNode.getBlockNode(), scope); - - builder.startArray("catch"); - for (SCatch catchNode : userTryNode.getCatchNodes()) { - catchNode.visit(this, scope); - } - builder.endArray(); - - end(userTryNode, scope); - } - - @Override - public void visitCatch(SCatch userCatchNode, ScriptScope scope) { - start(userCatchNode); - - builder.field("exception", userCatchNode.getBaseException()); - builder.field(Fields.TYPE, userCatchNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userCatchNode.getSymbol()); - - builder.startArray(Fields.BLOCK); - userCatchNode.visitChildren(this, scope); - builder.endArray(); - - end(userCatchNode, scope); - } - - @Override - public void visitThrow(SThrow userThrowNode, ScriptScope scope) { - start(userThrowNode); - - builder.startArray("expression"); - userThrowNode.visitChildren(this, scope); - builder.endArray(); - - end(userThrowNode, scope); - } - - @Override - public void visitContinue(SContinue userContinueNode, ScriptScope scope) { - start(userContinueNode); - end(userContinueNode, scope); - } - - @Override - public void visitBreak(SBreak userBreakNode, ScriptScope scope) { - start(userBreakNode); - end(userBreakNode, scope); - } - - @Override - public void visitAssignment(EAssignment userAssignmentNode, ScriptScope scope) { - start(userAssignmentNode); - // TODO(stu): why would operation be null? 
- builder.field("postIfRead", userAssignmentNode.postIfRead()); - binaryOperation(userAssignmentNode.getOperation(), userAssignmentNode.getLeftNode(), userAssignmentNode.getRightNode(), scope); - end(userAssignmentNode, scope); - } - - @Override - public void visitUnary(EUnary userUnaryNode, ScriptScope scope) { - start(userUnaryNode); - - operation(userUnaryNode.getOperation()); - - builder.startArray("child"); - userUnaryNode.visitChildren(this, scope); - builder.endArray(); - - end(userUnaryNode, scope); - } - - @Override - public void visitBinary(EBinary userBinaryNode, ScriptScope scope) { - start(userBinaryNode); - binaryOperation(userBinaryNode.getOperation(), userBinaryNode.getLeftNode(), userBinaryNode.getRightNode(), scope); - end(userBinaryNode, scope); - } - - @Override - public void visitBooleanComp(EBooleanComp userBooleanCompNode, ScriptScope scope) { - start(userBooleanCompNode); - binaryOperation(userBooleanCompNode.getOperation(), userBooleanCompNode.getLeftNode(), userBooleanCompNode.getRightNode(), scope); - end(userBooleanCompNode, scope); - } - - @Override - public void visitComp(EComp userCompNode, ScriptScope scope) { - start(userCompNode); - binaryOperation(userCompNode.getOperation(), userCompNode.getLeftNode(), userCompNode.getRightNode(), scope); - end(userCompNode, scope); - } - - @Override - public void visitExplicit(EExplicit userExplicitNode, ScriptScope scope) { - start(userExplicitNode); - - builder.field(Fields.TYPE, userExplicitNode.getCanonicalTypeName()); - builder.startArray("child"); - userExplicitNode.visitChildren(this, scope); - builder.endArray(); - - end(userExplicitNode, scope); - } - - @Override - public void visitInstanceof(EInstanceof userInstanceofNode, ScriptScope scope) { - start(userInstanceofNode); - - builder.field(Fields.TYPE, userInstanceofNode.getCanonicalTypeName()); - builder.startArray("child"); - userInstanceofNode.visitChildren(this, scope); - builder.endArray(); - - end(userInstanceofNode, scope); - } - - @Override - public void visitConditional(EConditional userConditionalNode, ScriptScope scope) { - start(userConditionalNode); - - builder.startArray("condition"); - userConditionalNode.getConditionNode().visit(this, scope); - builder.endArray(); - - builder.startArray("true"); - userConditionalNode.getTrueNode().visit(this, scope); - builder.endArray(); - - builder.startArray("false"); - userConditionalNode.getFalseNode().visit(this, scope); - builder.endArray(); - - end(userConditionalNode, scope); - } - - @Override - public void visitElvis(EElvis userElvisNode, ScriptScope scope) { - start(userElvisNode); - - builder.startArray(Fields.LEFT); - userElvisNode.getLeftNode().visit(this, scope); - builder.endArray(); - - builder.startArray(Fields.RIGHT); - userElvisNode.getRightNode().visit(this, scope); - builder.endArray(); - - end(userElvisNode, scope); - } - - @Override - public void visitListInit(EListInit userListInitNode, ScriptScope scope) { - start(userListInitNode); - builder.startArray("values"); - userListInitNode.visitChildren(this, scope); - builder.endArray(); - end(userListInitNode, scope); - } - - @Override - public void visitMapInit(EMapInit userMapInitNode, ScriptScope scope) { - start(userMapInitNode); - expressions("keys", userMapInitNode.getKeyNodes(), scope); - expressions("values", userMapInitNode.getValueNodes(), scope); - end(userMapInitNode, scope); - } - - @Override - public void visitNewArray(ENewArray userNewArrayNode, ScriptScope scope) { - start(userNewArrayNode); - builder.field(Fields.TYPE, 
userNewArrayNode.getCanonicalTypeName()); - builder.field("isInitializer", userNewArrayNode.isInitializer()); - expressions("values", userNewArrayNode.getValueNodes(), scope); - end(userNewArrayNode, scope); - } - - @Override - public void visitNewObj(ENewObj userNewObjNode, ScriptScope scope) { - start(userNewObjNode); - builder.field(Fields.TYPE, userNewObjNode.getCanonicalTypeName()); - arguments(userNewObjNode.getArgumentNodes(), scope); - end(userNewObjNode, scope); - } - - @Override - public void visitCallLocal(ECallLocal userCallLocalNode, ScriptScope scope) { - start(userCallLocalNode); - builder.field("methodName", userCallLocalNode.getMethodName()); - arguments(userCallLocalNode.getArgumentNodes(), scope); - end(userCallLocalNode, scope); - } - - @Override - public void visitBooleanConstant(EBooleanConstant userBooleanConstantNode, ScriptScope scope) { - start(userBooleanConstantNode); - builder.field("value", userBooleanConstantNode.getBool()); - end(userBooleanConstantNode, scope); - } - - @Override - public void visitNumeric(ENumeric userNumericNode, ScriptScope scope) { - start(userNumericNode); - builder.field("numeric", userNumericNode.getNumeric()); - builder.field("radix", userNumericNode.getRadix()); - end(userNumericNode, scope); - } - - @Override - public void visitDecimal(EDecimal userDecimalNode, ScriptScope scope) { - start(userDecimalNode); - builder.field("value", userDecimalNode.getDecimal()); - end(userDecimalNode, scope); - } - - @Override - public void visitString(EString userStringNode, ScriptScope scope) { - start(userStringNode); - builder.field("value", userStringNode.getString()); - end(userStringNode, scope); - } - - @Override - public void visitNull(ENull userNullNode, ScriptScope scope) { - start(userNullNode); - end(userNullNode, scope); - } - - @Override - public void visitRegex(ERegex userRegexNode, ScriptScope scope) { - start(userRegexNode); - builder.field("pattern", userRegexNode.getPattern()); - builder.field("flags", userRegexNode.getFlags()); - end(userRegexNode, scope); - } - - @Override - public void visitLambda(ELambda userLambdaNode, ScriptScope scope) { - start(userLambdaNode); - builder.field("types", userLambdaNode.getCanonicalTypeNameParameters()); - builder.field("parameters", userLambdaNode.getParameterNames()); - block(userLambdaNode.getBlockNode(), scope); - end(userLambdaNode, scope); - } - - @Override - public void visitFunctionRef(EFunctionRef userFunctionRefNode, ScriptScope scope) { - start(userFunctionRefNode); - builder.field(Fields.SYMBOL, userFunctionRefNode.getSymbol()); - builder.field("methodName", userFunctionRefNode.getMethodName()); - end(userFunctionRefNode, scope); - } - - @Override - public void visitNewArrayFunctionRef(ENewArrayFunctionRef userNewArrayFunctionRefNode, ScriptScope scope) { - start(userNewArrayFunctionRefNode); - builder.field(Fields.TYPE, userNewArrayFunctionRefNode.getCanonicalTypeName()); - end(userNewArrayFunctionRefNode, scope); - } - - @Override - public void visitSymbol(ESymbol userSymbolNode, ScriptScope scope) { - start(userSymbolNode); - builder.field(Fields.SYMBOL, userSymbolNode.getSymbol()); - end(userSymbolNode, scope); - } - - @Override - public void visitDot(EDot userDotNode, ScriptScope scope) { - start(userDotNode); - - builder.startArray("prefix"); - userDotNode.visitChildren(this, scope); - builder.endArray(); - - builder.field("index", userDotNode.getIndex()); - builder.field("nullSafe", userDotNode.isNullSafe()); - - end(userDotNode, scope); - } - - @Override - public void 
visitBrace(EBrace userBraceNode, ScriptScope scope) { - start(userBraceNode); - - builder.startArray("prefix"); - userBraceNode.getPrefixNode().visit(this, scope); - builder.endArray(); - - builder.startArray("index"); - userBraceNode.getIndexNode().visit(this, scope); - builder.endArray(); - - end(userBraceNode, scope); - } - - @Override - public void visitCall(ECall userCallNode, ScriptScope scope) { - start(userCallNode); - - builder.startArray("prefix"); - userCallNode.getPrefixNode().visitChildren(this, scope); - builder.endArray(); - - builder.field("isNullSafe", userCallNode.isNullSafe()); - builder.field("methodName", userCallNode.getMethodName()); - - arguments(userCallNode.getArgumentNodes(), scope); - - end(userCallNode, scope); - } - - private void start(ANode node) { - builder.startObject(); - builder.field(Fields.NODE, node.getClass().getSimpleName()); - builder.field(Fields.LOCATION, node.getLocation().getOffset()); - } - - private void end(ANode node, ScriptScope scope) { - decorations(node, scope); - builder.endObject(); - } - - private void block(String name, SBlock block, ScriptScope scope) { - builder.startArray(name); - if (block != null) { - block.visit(this, scope); - } - builder.endArray(); - } - - private void block(SBlock block, ScriptScope scope) { - block(Fields.BLOCK, block, scope); - } - - private void loop(AExpression condition, SBlock block, ScriptScope scope) { - builder.startArray(Fields.CONDITION); - condition.visit(this, scope); - builder.endArray(); - - block(block, scope); - } - - private void operation(Operation op) { - builder.startObject("operation"); - if (op != null) { - builder.field(Fields.SYMBOL, op.symbol); - builder.field("name", op.name); - } - builder.endObject(); - } - - private void binaryOperation(Operation op, AExpression left, AExpression right, ScriptScope scope) { - operation(op); - - builder.startArray(Fields.LEFT); - left.visit(this, scope); - builder.endArray(); - - builder.startArray(Fields.RIGHT); - right.visit(this, scope); - builder.endArray(); - } - - private void arguments(List arguments, ScriptScope scope) { - if (arguments.isEmpty() == false) { - expressions("arguments", arguments, scope); - } - } - - private void expressions(String name, List expressions, ScriptScope scope) { - if (expressions.isEmpty() == false) { - builder.startArray(name); - for (AExpression expression : expressions) { - expression.visit(this, scope); - } - builder.endArray(); - } - } - - private void decorations(ANode node, ScriptScope scope) { - Set> conditions = scope.getAllConditions(node.getIdentifier()); - if (conditions.isEmpty() == false) { - builder.field(Fields.CONDITIONS, conditions.stream().map(Class::getSimpleName).sorted().collect(Collectors.toList())); - } - - Map, Decoration> decorations = scope.getAllDecorations(node.getIdentifier()); - if (decorations.isEmpty() == false) { - builder.startArray(Fields.DECORATIONS); - - decorations.keySet() - .stream() - .sorted(Comparator.comparing(Class::getName)) - .forEachOrdered(dkey -> DecorationToXContent.ToXContent(decorations.get(dkey), builder)); - builder.endArray(); - } - } - - @Override - public String toString() { - return builder.toString(); - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java deleted file mode 100644 index 2ac349ba697cf..0000000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java +++ /dev/null 
@@ -1,124 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.painless; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.painless.phase.UserTreeVisitor; -import org.elasticsearch.painless.symbol.ScriptScope; -import org.elasticsearch.painless.toxcontent.UserTreeToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class ToXContentTests extends ScriptTestCase { - public void testUserFunction() { - Map func = getFunction("def twofive(int i) { return 25 + i; } int j = 23; twofive(j)", "twofive"); - assertFalse((Boolean) func.get("isInternal")); - assertFalse((Boolean) func.get("isStatic")); - assertEquals("SFunction", func.get("node")); - assertEquals("def", func.get("returns")); - assertEquals(List.of("int"), func.get("parameterTypes")); - assertEquals(List.of("i"), func.get("parameters")); - } - - public void testBlock() { - Map execute = getExecute("int i = 5; return i;"); - Map block = getNode(execute, "block", "SBlock"); - for (Object obj : (List) block.get("statements")) { - Map statement = (Map) obj; - } - Map decl = getStatement(block, "SDeclBlock"); - List decls = (List) decl.get("declarations"); - assertEquals(1, decls.size()); - assertEquals("i", ((Map) decls.get(0)).get("symbol")); - assertEquals("int", ((Map) decls.get(0)).get("type")); - - Map ret = getStatement(block, "SReturn"); - Map symbol = (Map) ((List) ret.get("value")).get(0); - assertEquals("ESymbol", symbol.get("node")); - assertEquals("i", symbol.get("symbol")); - } - - public void testFor() { - Map execute = getExecute("int q = 0; for (int j = 0; j < 100; j++) { q += j; } return q"); - Map sfor = getStatement(getNode(execute, "block", "SBlock"), "SFor"); - - Map ecomp = getNode(sfor, "condition", "EComp"); - assertEquals("j", getNode(ecomp, "left", "ESymbol").get("symbol")); - assertEquals("100", getNode(ecomp, "right", "ENumeric").get("numeric")); - assertEquals("less than", ((Map) ecomp.get("operation")).get("name")); - - Map init = getNode(sfor, "initializer", "SDeclBlock"); - Map decl = getNode(init, "declarations", "SDeclaration"); - assertEquals("j", decl.get("symbol")); - assertEquals("int", decl.get("type")); - assertEquals("0", getNode(decl, "value", "ENumeric").get("numeric")); - - Map after = getNode(sfor, "afterthought", "EAssignment"); - assertEquals("j", getNode(after, "left", "ESymbol").get("symbol")); - assertEquals("1", getNode(after, "right", "ENumeric").get("numeric")); - assertTrue((Boolean) after.get("postIfRead")); - } - - private Map getStatement(Map block, String node) { - return getNode(block, "statements", node); - } - - private Map getNode(Map map, String key, String node) { - for (Object obj : (List) map.get(key)) { - Map nodeMap = (Map) obj; - if (node.equals(nodeMap.get("node"))) { - return nodeMap; - } - } - fail("Missing node [" + node + "]"); - return Collections.emptyMap(); - } - - private Map getExecute(String script) { - return getFunction(script, "execute"); - } - - private 
Map getFunction(String script, String function) { - return getFunction(semanticPhase(script), function); - } - - private Map getFunction(XContentBuilder builder, String function) { - Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); - for (Object funcObj : ((List) map.get("functions"))) { - if (funcObj instanceof Map) { - if (function.equals(((Map) funcObj).get("name"))) { - return (Map) funcObj; - } - } - } - fail("Function [" + function + "] not found"); - return Collections.emptyMap(); - } - - private XContentBuilder semanticPhase(String script) { - XContentBuilder builder; - try { - builder = XContentFactory.jsonBuilder(); - } catch (IOException err) { - fail("script [" + script + "] threw IOException [" + err.getMessage() + "]"); - return null; - } - UserTreeVisitor semantic = new UserTreeToXContent(builder); - Debugger.phases(script, semantic, null, null); - Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); - assertEquals(script, map.get("source")); - return builder; - } -} From df0fd30e7acc587874f4531cb68e4dfc45dc01e8 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 2 Apr 2024 09:35:02 +0300 Subject: [PATCH 044/264] [Doc] Privileges required to retrieve the status of async searches Document that users can retrieve the status of the async searches they submitted without any extra privileges. --- docs/reference/search/async-search.asciidoc | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/async-search.asciidoc b/docs/reference/search/async-search.asciidoc index ec8a95ec4006b..786cfaee8024c 100644 --- a/docs/reference/search/async-search.asciidoc +++ b/docs/reference/search/async-search.asciidoc @@ -143,8 +143,10 @@ allowed size for a stored async search response can be set by changing the ==== Get async search The get async search API retrieves the results of a previously submitted async -search request given its id. If the {es} {security-features} are enabled, the -access to the results of a specific async search is restricted to +search request given its id. + +If the {es} {security-features} are enabled, the access to the results of a +specific async search is restricted to only <>. [source,console,id=get-async-search-date-histogram-example] @@ -235,9 +237,13 @@ its saved results are deleted. ==== Get async search status The get async search status API, without retrieving search results, shows only -the status of a previously submitted async search request given its `id`. If the -{es} {security-features} are enabled, the access to the get async search status -API is restricted to the <>. +the status of a previously submitted async search request given its `id`. + +If the {es} {security-features} are enabled, the access to the status of a +specific async search is restricted to: + +* The <> the original async search request. +* Users that have the `monitor` cluster privilege or higher. You can also specify how long the async search needs to be available through the `keep_alive` parameter, which defaults to `5d` (five days). Ongoing async @@ -333,5 +339,5 @@ DELETE /_async_search/FmRldE8zREVEUzA2ZVpUeGs2ejJFUFEaMkZ5QTVrSTZSaVN3WlNFVmtlWH If the {es} {security-features} are enabled, the deletion of a specific async search is restricted to: - * The authenticated user that submitted the original search request. - * Users that have the `cancel_task` cluster privilege. 
+ * The <> the original async search request. + * Users that have the `cancel_task` cluster privilege or higher. From 89cfb85c82927a1ba91bcf8874e312ed25eb0c54 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 2 Apr 2024 09:36:50 +0300 Subject: [PATCH 045/264] [Test] Fix AsyncSearchSecurityIT testStatusWithUsersWhileSearchIsRunning (#106912) The error_query is only available in snapshot builds. All test failures have the release-tests tag. Closes #106871 --- .../org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 1f4830d8b6d0c..f628566587611 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.search; import org.apache.http.util.EntityUtils; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; @@ -178,6 +179,7 @@ public void testWithUsers() throws Exception { */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106871") public void testStatusWithUsersWhileSearchIsRunning() throws IOException { + assumeTrue("[error_query] is only available in snapshot builds", Build.current().isSnapshot()); String user = randomFrom("user1", "user2"); String other = user.equals("user1") ? "user2" : "user1"; String indexName = "index-" + user; From 0eca03cae1ea9bc696a6bd3f0c418ae2634645da Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 07:52:25 +0100 Subject: [PATCH 046/264] Remove unused `SnapshotsInRepo#remaining` (#106674) We only discard snapshots using the `?size` parameter when constructing the final response, so we can count the `remaining` snapshots in a local variable rather than tracking an unnecessary `0` on every `SnapshotsInRepo`. Indeed, by inlining `sortSnapshots` into the routine that constructs the final response we can avoid even having to build a final `SnapshotsInRepo` here.
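To illustrate the counting pattern this commit message describes, here is a minimal self-contained Java sketch. The class, method, and variable names are hypothetical and simplified; they are not the actual TransportGetSnapshotsAction code, which follows in the diff below:

    import java.util.ArrayList;
    import java.util.List;

    final class Page<T> {
        final List<T> results;   // at most `size` items to return
        final int remaining;     // how many sorted items were cut off

        Page(List<T> sorted, int size) {
            // Cap the initial allocation so an excessively large `?size`
            // request parameter cannot force a huge up-front allocation.
            List<T> page = new ArrayList<>(Math.min(size, 1000));
            int cutOff = 0;
            for (T item : sorted) {
                if (page.size() < size) {
                    page.add(item);  // still within the requested page
                } else {
                    cutOff += 1;     // counted once, never stored per repository
                }
            }
            this.results = page;
            this.remaining = cutOff;
        }
    }

The point of the change is that `remaining` is computed exactly once, at response-building time, instead of being carried on every intermediate `SnapshotsInRepo`.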
--- .../get/TransportGetSnapshotsAction.java | 56 +++++++------------ 1 file changed, 21 insertions(+), 35 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 6d29c36bdcda1..a66b318b16258 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -67,7 +67,6 @@ import java.util.function.BooleanSupplier; import java.util.function.Predicate; import java.util.function.ToLongFunction; -import java.util.stream.Stream; /** * Transport Action for get snapshots operation @@ -181,7 +180,6 @@ private class GetSnapshotsOperation { // results private final Map failuresByRepository = ConcurrentCollections.newConcurrentMap(); private final Queue> allSnapshotInfos = ConcurrentCollections.newQueue(); - private final AtomicInteger remaining = new AtomicInteger(); private final AtomicInteger totalCount = new AtomicInteger(); GetSnapshotsOperation( @@ -256,7 +254,6 @@ void getMultipleReposSnapshotInfo(ActionListener listener) @Override public void onResponse(SnapshotsInRepo snapshotsInRepo) { allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); - remaining.addAndGet(snapshotsInRepo.remaining()); totalCount.addAndGet(snapshotsInRepo.totalCount()); delegate.onResponse(null); } @@ -275,26 +272,7 @@ public void onFailure(Exception e) { } }) - .addListener(listener.map(ignored -> { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - cancellableTask.ensureNotCancelled(); - final var sortedSnapshotsInRepos = sortSnapshots( - allSnapshotInfos.stream().flatMap(Collection::stream), - totalCount.get(), - offset, - size - ); - final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); - assert indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); - final int finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); - return new GetSnapshotsResponse( - snapshotInfos, - failuresByRepository, - finalRemaining > 0 ? 
sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, - totalCount.get(), - finalRemaining - ); - })); + .addListener(listener.map(ignored -> buildResponse())); } private boolean skipRepository(String repositoryName) { @@ -486,30 +464,40 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( } private SnapshotsInRepo applyAfterPredicate(List snapshotInfos) { - return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size(), 0); + return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size()); } - private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { + private GetSnapshotsResponse buildResponse() { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - final var resultsStream = snapshotInfoStream.peek(this::assertSatisfiesAllPredicates) + cancellableTask.ensureNotCancelled(); + int remaining = 0; + final var resultsStream = allSnapshotInfos.stream() + .flatMap(Collection::stream) + .peek(this::assertSatisfiesAllPredicates) .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); + final List snapshotInfos; if (size == GetSnapshotsRequest.NO_LIMIT) { - return new SnapshotsInRepo(resultsStream.toList(), totalCount, 0); + snapshotInfos = resultsStream.toList(); } else { final var allocateSize = Math.min(size, 1000); // ignore excessively-large sizes in request params - final var results = new ArrayList(allocateSize); - var remaining = 0; + snapshotInfos = new ArrayList<>(allocateSize); for (var iterator = resultsStream.iterator(); iterator.hasNext();) { final var snapshotInfo = iterator.next(); - if (results.size() < size) { - results.add(snapshotInfo); + if (snapshotInfos.size() < size) { + snapshotInfos.add(snapshotInfo); } else { remaining += 1; } } - return new SnapshotsInRepo(results, totalCount, remaining); } + return new GetSnapshotsResponse( + snapshotInfos, + failuresByRepository, + remaining > 0 ? 
sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, + totalCount.get(), + remaining + ); } private void assertSatisfiesAllPredicates(SnapshotInfo snapshotInfo) { @@ -684,9 +672,7 @@ private static int indexCount(SnapshotId snapshotId, RepositoryData repositoryDa } } - private record SnapshotsInRepo(List snapshotInfos, int totalCount, int remaining) { - private static final SnapshotsInRepo EMPTY = new SnapshotsInRepo(List.of(), 0, 0); - } + private record SnapshotsInRepo(List snapshotInfos, int totalCount) {} /** * Throttling executor for retrieving {@link SnapshotInfo} instances from the repository without spamming the SNAPSHOT_META threadpool From 519580e62b3a71cf2e9b6bd8ab14b92c6c5076b2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 08:42:07 +0100 Subject: [PATCH 047/264] AwaitsFix for #106957 --- .../org/elasticsearch/indices/SystemIndexThreadPoolTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java index b97c39ce70792..7db5d10c5fcfa 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java @@ -67,6 +67,7 @@ protected void runWithBlockedThreadPools(Runnable runnable) { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testUserThreadPoolsAreBlocked() { assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); From 7ee63dfd512736ad5bbfe2cfb3cbe2fda605b64d Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 09:18:59 +0100 Subject: [PATCH 048/264] Expand docs on reset desired balance API (#106921) Explains its purpose and surrounding context a little more, including a note that this should never be necessary (i.e. if you find you need it, that's a bug). --- docs/reference/cluster/delete-desired-balance.asciidoc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/reference/cluster/delete-desired-balance.asciidoc b/docs/reference/cluster/delete-desired-balance.asciidoc index 7b89fed881e41..f81dcab011da4 100644 --- a/docs/reference/cluster/delete-desired-balance.asciidoc +++ b/docs/reference/cluster/delete-desired-balance.asciidoc @@ -6,9 +6,13 @@ NOTE: {cloud-only} -Resets the desired balance and starts a new computation from the current allocation. -This API may be used if desired balance computation diverged from the current state -and is trying to move too many shards. +Discards the current desired balance and computes a new desired balance starting from the current allocation of shards. +This can sometimes help {es} find a desired balance which needs fewer shard movements to achieve, especially if the +cluster has experienced changes so substantial that the current desired balance is no longer optimal without {es} having +detected that the current desired balance will take more shard movements to achieve than needed. However, this API +imposes a heavy load on the elected master node and may not always have the expected effect. Calling this API should +never be necessary. Consider instead <> to avoid excessive shard movements. 
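For orientation, a usage sketch of the API this page documents. The request line below reflects my understanding of this internal endpoint and is not part of the patch; confirm it against the request section that follows:

    DELETE /_internal/desired_balance

The elected master then discards the stored desired balance and computes a fresh one from the current allocation of shards, which is why the paragraph above steers readers toward the linked alternative rather than toward calling this API routinely.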
[[delete-desired-balance-request]] ==== {api-request-title} From 20fc2d2d5e00043e0d830de5e700a8bce7098b3e Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 2 Apr 2024 19:27:33 +1100 Subject: [PATCH 049/264] Test modifying and removing data in ProfileIT (#106582) User Profiles can be used to store application data against a user (e.g. user preferences). This commit extends the integration tests for profile data storage to include explicit tests for partial updates and clearing existing data --- .../xpack/security/profile/ProfileIT.java | 139 +++++++++++++++++- 1 file changed, 132 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java index e87d548bc23f2..b11c8fd39fe2c 100644 --- a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java +++ b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -24,6 +25,9 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.junit.ClassRule; import java.io.IOException; @@ -253,11 +257,11 @@ public void testGetProfiles() throws IOException { errorDetails4.values().forEach(value -> assertThat(castToMap(value).get("type"), equalTo("resource_not_found_exception"))); } - public void testUpdateProfileData() throws IOException { + public void testStoreProfileData() throws IOException { final Map activateProfileMap = doActivateProfile(); final String uid = (String) activateProfileMap.get("uid"); - final Request updateProfileRequest1 = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); - updateProfileRequest1.setJsonEntity(""" + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + updateProfileRequest.setJsonEntity(""" { "labels": { "app1": { "tags": [ "prod", "east" ] } }, "data": { "app1": { "theme": "default" } } }"""); - assertOK(adminClient().performRequest(updateProfileRequest1)); + assertOK(adminClient().performRequest(updateProfileRequest)); - final Map profileMap1 = doGetProfile(uid, "app1"); - assertThat(castToMap(profileMap1.get("labels")), equalTo(Map.of("app1", Map.of("tags", List.of("prod", "east"))))); - assertThat(castToMap(profileMap1.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); + final Map profileMap = doGetProfile(uid, "app1"); + assertThat(castToMap(profileMap.get("labels")), equalTo(Map.of("app1", Map.of("tags", List.of("prod", "east"))))); + assertThat(castToMap(profileMap.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); + } + + public void
testModifyProfileData() throws IOException { + final Map<String, Object> activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + final String endpoint = "_security/profile/" + uid + "/_data"; + final String appName1 = randomAlphaOfLengthBetween(3, 5); + final String appName2 = randomAlphaOfLengthBetween(6, 8); + final List<String> tags = randomList(1, 5, () -> randomAlphaOfLengthBetween(4, 12)); + final String labelKey = randomAlphaOfLengthBetween(4, 6); + final String dataKey1 = randomAlphaOfLengthBetween(3, 5); + final String dataKey2 = randomAlphaOfLengthBetween(6, 8); + final String dataKey3 = randomAlphaOfLengthBetween(9, 10); + final String dataValue1a = randomAlphaOfLengthBetween(6, 9); + final String dataValue1b = randomAlphaOfLengthBetween(10, 12); + final String dataValue2 = randomAlphaOfLengthBetween(6, 12); + final String dataValue3 = randomAlphaOfLengthBetween(4, 10); + + // Store the data + { + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), endpoint); + final Map<String, Object> dataBlock = Map.ofEntries( + // { k1: v1, k2: v2 } + Map.entry(dataKey1, dataValue1a), + Map.entry(dataKey2, dataValue2) + ); + updateProfileRequest.setJsonEntity( + toJson( + Map.ofEntries( + Map.entry("labels", Map.of(appName1, Map.of(labelKey, tags))), + // Store the same data under both app-names + Map.entry("data", Map.of(appName1, dataBlock, appName2, dataBlock)) + ) + ) + ); + assertOK(adminClient().performRequest(updateProfileRequest)); + + final Map<String, Object> profileMap1 = doGetProfile(uid, appName1); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName1, profileMap1); + assertThat(ObjectPath.eval("labels." + appName1 + "." + labelKey, profileMap1), equalTo(tags)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey1, profileMap1), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey2, profileMap1), equalTo(dataValue2)); + final Map<String, Object> profileMap2 = doGetProfile(uid, appName2); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName2, profileMap2); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey1, profileMap2), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey2, profileMap2), equalTo(dataValue2)); + } + + // Store modified data + { + // Add a new tag, remove an old one + final String newTag = randomValueOtherThanMany(tags::contains, () -> randomAlphaOfLengthBetween(3, 9)); + tags.remove(randomFrom(tags)); + tags.add(newTag); + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), endpoint); + final Map<String, Object> dataBlock = Map.ofEntries( + // { k1: v1b, k3: v3 } + Map.entry(dataKey1, dataValue1b), + Map.entry(dataKey3, dataValue3) + ); + updateProfileRequest.setJsonEntity( + toJson( + Map.ofEntries( + Map.entry("labels", Map.of(appName1, Map.of(labelKey, tags))), + // We don't make any changes to appName2, so it should keep the original data + Map.entry("data", Map.of(appName1, dataBlock)) + ) + ) + ); + assertOK(adminClient().performRequest(updateProfileRequest)); + + final Map<String, Object> profileMap1 = doGetProfile(uid, appName1); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName1, profileMap1); + assertThat(ObjectPath.eval("labels." + appName1 + "." + labelKey, profileMap1), equalTo(tags)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey1, profileMap1), equalTo(dataValue1b)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey2, profileMap1), equalTo(dataValue2)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey3, profileMap1), equalTo(dataValue3)); + final Map<String, Object> profileMap2 = doGetProfile(uid, appName2); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName2, profileMap2); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey1, profileMap2), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey2, profileMap2), equalTo(dataValue2)); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey3, profileMap2), nullValue()); + } + } + + public void testRemoveProfileData() throws IOException { + final Map<String, Object> activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + { + final Request request = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + request.setJsonEntity(""" + { + "data": { + "app1": { "top": { "inner" : { "leaf": "data_value" } } } + } + }"""); + assertOK(adminClient().performRequest(request)); + + final Map<String, Object> profileMap = doGetProfile(uid, "app1"); + assertThat(ObjectPath.eval("data.app1.top.inner.leaf", profileMap), equalTo("data_value")); + } + { + final Request request = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + request.setJsonEntity(""" + { + "data": { + "app1": { "top": null } + } + }"""); + assertOK(adminClient().performRequest(request)); + + final Map<String, Object> profileMap = doGetProfile(uid, "app1"); + assertThat(ObjectPath.eval("data.app1.top", profileMap), nullValue()); + } } public void testSuggestProfile() throws IOException { @@ -559,4 +677,11 @@ private void doSetEnabled(String uid, boolean enabled) throws IOException { private Map<String, Object> castToMap(Object o) { return (Map<String, Object>) o; } + + private static String toJson(Map<String, Object> map) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder().map(map); + final BytesReference bytes = BytesReference.bytes(builder); + return bytes.utf8ToString(); + } + } From 2380492fac306fe104115ea6dabc57fc7fb02cae Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 2 Apr 2024 10:31:00 +0200 Subject: [PATCH 050/264] ESQL: Support ST_CONTAINS and ST_WITHIN (#106503) * WIP Started adding ST_CONTAINS * Add generated evaluators * Reduced warnings and use correct evaluators * Refactored tests to remove duplicate code, and fixed Contains/multi-components * Gradle build disallows using getDeclaredField * Fixed cases where rectangles cross the dateline * Fixed meta function tests * Added ST_WITHIN to support inverting ST_CONTAINS If the ST_CONTAINS is called with the constant on the left, we either have to create a lot more Evaluators to cover that case, or we have to invert it to ST_WITHIN. This inversion was a much easier option. * Simplify inversion logic * Add comment on choice of surrogate approach * Add unit tests and missing fold() function * Simple code cleanup * Add integration tests for literals * Add more integration tests based on actual data * Generated documentation files * Add documentation * Fixed failing function count test * Add tests that push-to-source works for ST_CONTAINS and ST_WITHIN * Test more combinations of WITHIN/CONTAINS and literal on right and left This also verifies that the re-writing of CONTAINS to WITHIN or vice versa occurs when the literal is on the left. * test that physical planning also handles doc-values from STATS * Added more tests for WITHIN/CONTAINS together with CENTROID This should test the doc-values for points.
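The inversion noted above relies on the identity that `ST_CONTAINS(geomA, geomB)` holds exactly when `ST_WITHIN(geomB, geomA)` holds, so a predicate with the constant geometry on the left can be planned as its mirror image instead of requiring a second family of evaluators. A hypothetical ES|QL sketch of the rewrite (the field and literal are illustrative, borrowed from the tests in this patch):

    FROM airports
    | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))"), location)

    // ... is equivalent to, and can be planned as ...

    FROM airports
    | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))"))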
* Add cartesian_point tests * Add cartesian_shape tests * Disable Lucene-push-down for CARTESIAN data This is a limitation in Lucene, which we could address as a performance optimization in a future PR, but since it probably requires Lucene changes, it cannot be done in this work. * Fix doc links * Added test data and tests for cartesian multi-polygons Testing INTERSECTS, CONTAINS and WITHIN with multi-polygon fields * Use required features for spatial points, shapes and centroid * 8.13.0 is not yet a historical version This needs to be reverted as soon as 8.13.0 is released * Added st_intersects and st_contains_within 'features' * Code review updates * Re-enable lucene push-down * Added more required_features * Fix point contains non-point * Fix point contains point * Re-enable lucene push-down in tests too Forgot to change the physical planner unit tests after re-enabling lucene push-down * Generate automatic docs * Use generated examples docs * Generated examples use '-result' prefix (singular) * Mark spatial functions as preview/experimental --- docs/changelog/106503.yaml | 5 + .../functions/aggregation-functions.asciidoc | 2 +- .../description/st_contains.asciidoc | 7 + .../description/st_intersects.asciidoc | 2 + .../functions/description/st_within.asciidoc | 7 + .../functions/examples/st_contains.asciidoc | 13 + .../functions/examples/st_intersects.asciidoc | 13 + .../functions/examples/st_within.asciidoc | 13 + .../functions/layout/st_contains.asciidoc | 15 + .../functions/layout/st_intersects.asciidoc | 1 + .../esql/functions/layout/st_within.asciidoc | 15 + .../functions/parameters/st_contains.asciidoc | 7 + .../functions/parameters/st_within.asciidoc | 7 + .../esql/functions/signature/st_contains.svg | 1 + .../esql/functions/signature/st_within.svg | 1 + .../esql/functions/spatial-functions.asciidoc | 10 +- .../esql/functions/st_centroid.asciidoc | 2 + .../esql/functions/st_contains.asciidoc | 26 + .../esql/functions/st_intersects.asciidoc | 16 +- .../esql/functions/st_within.asciidoc | 26 + docs/reference/esql/functions/st_x.asciidoc | 2 + docs/reference/esql/functions/st_y.asciidoc | 2 + .../esql/functions/types/st_contains.asciidoc | 16 + .../esql/functions/types/st_within.asciidoc | 16 + .../xpack/esql/CsvTestsDataLoader.java | 8 +- .../resources/cartesian_multipolygons.csv | 11 + .../cartesian_multipolygons.csv-spec | 201 ++++++ .../mapping-cartesian_multipolygons.json | 13 + .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/spatial.csv-spec | 616 +++++++++++++++--- .../main/resources/spatial_shapes.csv-spec | 198 +++++- ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 ++++ ...nsCartesianSourceAndConstantEvaluator.java | 132 ++++ ...ainsCartesianSourceAndSourceEvaluator.java | 152 +++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++ ...nsGeoPointDocValuesAndSourceEvaluator.java | 151 +++++ ...ContainsGeoSourceAndConstantEvaluator.java | 132 ++++ ...alContainsGeoSourceAndSourceEvaluator.java | 152 +++++ ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 ++++ ...inCartesianSourceAndConstantEvaluator.java | 132 ++++ ...thinCartesianSourceAndSourceEvaluator.java | 152 +++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++ ...inGeoPointDocValuesAndSourceEvaluator.java | 151 +++++ ...alWithinGeoSourceAndConstantEvaluator.java | 132 ++++ ...tialWithinGeoSourceAndSourceEvaluator.java | 152 +++++ .../function/EsqlFunctionRegistry.java
| 4 + .../spatial/LuceneComponent2DUtils.java | 91 +++ .../scalar/spatial/SpatialContains.java | 287 ++++++++ .../spatial/SpatialEvaluatorFactory.java | 4 +- .../scalar/spatial/SpatialIntersects.java | 23 +- .../spatial/SpatialRelatesFunction.java | 24 +- .../scalar/spatial/SpatialRelatesUtils.java | 10 + .../scalar/spatial/SpatialWithin.java | 252 +++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 23 +- .../esql/optimizer/LogicalPlanOptimizer.java | 23 +- .../xpack/esql/plugin/EsqlFeatures.java | 49 +- .../querydsl/query/SpatialRelatesQuery.java | 6 +- .../function/AbstractFunctionTestCase.java | 2 +- .../scalar/spatial/SpatialContainsTests.java | 46 ++ .../spatial/SpatialIntersectsTests.java | 173 +---- .../SpatialRelatesFunctionTestCase.java | 207 ++++++ .../scalar/spatial/SpatialWithinTests.java | 46 ++ .../optimizer/PhysicalPlanOptimizerTests.java | 172 +++++ 65 files changed, 4627 insertions(+), 327 deletions(-) create mode 100644 docs/changelog/106503.yaml create mode 100644 docs/reference/esql/functions/description/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/description/st_within.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_intersects.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_within.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_within.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_within.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_contains.svg create mode 100644 docs/reference/esql/functions/signature/st_within.svg create mode 100644 docs/reference/esql/functions/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/st_within.asciidoc create mode 100644 docs/reference/esql/functions/types/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/types/st_within.asciidoc create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java 
create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java diff --git a/docs/changelog/106503.yaml b/docs/changelog/106503.yaml new file mode 100644 index 0000000000000..1b7e78d8ffc27 --- /dev/null +++ b/docs/changelog/106503.yaml @@ -0,0 +1,5 @@ +pr: 106503 +summary: "Support ST_CONTAINS and ST_WITHIN" +area: "ES|QL" +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 373b1c140a896..c040e7fe01327 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -16,7 +16,7 @@ The <> function supports these aggregate functions: * <> * <> * <> -* <> +* experimental:[] <> * <> * <> // end::agg_list[] diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc new file mode 100644 index 
0000000000000..ed79fe3d9c1f3 --- /dev/null +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the first geometry contains the second geometry. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8b..3a36d79cbd123 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,3 +3,5 @@ *Description* Returns whether the two geometries or geometry columns intersect. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc new file mode 100644 index 0000000000000..be52db3f694bf --- /dev/null +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the first geometry is within the second geometry. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/examples/st_contains.asciidoc b/docs/reference/esql/functions/examples/st_contains.asciidoc new file mode 100644 index 0000000000000..ad60944d28562 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_contains.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_contains-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_contains-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_intersects.asciidoc b/docs/reference/esql/functions/examples/st_intersects.asciidoc new file mode 100644 index 0000000000000..135fd6afee17c --- /dev/null +++ b/docs/reference/esql/functions/examples/st_intersects.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_within.asciidoc b/docs/reference/esql/functions/examples/st_within.asciidoc new file mode 100644 index 0000000000000..0943ee4ba862a --- /dev/null +++ b/docs/reference/esql/functions/examples/st_within.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_within-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_within-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/layout/st_contains.asciidoc b/docs/reference/esql/functions/layout/st_contains.asciidoc new file mode 100644 index 0000000000000..d2b115b5bd727 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_contains.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_contains]] +=== `ST_CONTAINS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] + +include::../parameters/st_contains.asciidoc[] +include::../description/st_contains.asciidoc[] +include::../types/st_contains.asciidoc[] +include::../examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_intersects.asciidoc b/docs/reference/esql/functions/layout/st_intersects.asciidoc index 1d0721b65606e..820bfd2c1fee6 100644 --- a/docs/reference/esql/functions/layout/st_intersects.asciidoc +++ b/docs/reference/esql/functions/layout/st_intersects.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] include::../parameters/st_intersects.asciidoc[] include::../description/st_intersects.asciidoc[] include::../types/st_intersects.asciidoc[] +include::../examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_within.asciidoc b/docs/reference/esql/functions/layout/st_within.asciidoc new file mode 100644 index 0000000000000..38e367abc3c31 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_within.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-st_within]] +=== `ST_WITHIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_within.svg[Embedded,opts=inline] + +include::../parameters/st_within.asciidoc[] +include::../description/st_within.asciidoc[] +include::../types/st_within.asciidoc[] +include::../examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/signature/st_contains.svg b/docs/reference/esql/functions/signature/st_contains.svg new file mode 100644 index 0000000000000..dde41fc527454 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_contains.svg @@ -0,0 +1 @@ +ST_CONTAINS(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/st_within.svg b/docs/reference/esql/functions/signature/st_within.svg new file mode 100644 index 0000000000000..a88522cfbc5cb --- /dev/null +++ b/docs/reference/esql/functions/signature/st_within.svg @@ -0,0 +1 @@ +ST_WITHIN(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index c1758f61de723..739d6b2d6f58f 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -8,11 +8,15 @@ {esql} supports these spatial functions: // tag::spatial_list[] -* <> -* <> -* <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_contains.asciidoc[] +include::st_within.asciidoc[] include::st_x.asciidoc[] include::st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid.asciidoc index cee0c85d5cb45..e91a325a5597b 100644 --- a/docs/reference/esql/functions/st_centroid.asciidoc +++ b/docs/reference/esql/functions/st_centroid.asciidoc @@ -2,6 +2,8 @@ [[esql-agg-st-centroid]] === `ST_CENTROID` +experimental::[] + Calculate the spatial centroid over a field with spatial point geometry type. [source.merge.styled,esql] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc new file mode 100644 index 0000000000000..07b1a11aa7247 --- /dev/null +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -0,0 +1,26 @@ +[discrete] +[[esql-st_contains]] +=== `ST_CONTAINS` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +include::description/st_contains.asciidoc[] +This is the inverse of the `<>` function. + +include::types/st_contains.asciidoc[] +include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc index 1bf4cef0e2977..fbe313d10b0e7 100644 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -2,6 +2,8 @@ [[esql-st_intersects]] === `ST_INTERSECTS` +experimental::[] + *Syntax* [.text-center] @@ -24,17 +26,5 @@ They intersect if they have any point in common, including their interior points (points along lines or within polygons). In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ -*Supported types* - include::types/st_intersects.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-results] -|=== +include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc new file mode 100644 index 0000000000000..64adb91219c4a --- /dev/null +++ b/docs/reference/esql/functions/st_within.asciidoc @@ -0,0 +1,26 @@ +[discrete] +[[esql-st_within]] +=== `ST_WITHIN` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_within.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +include::description/st_within.asciidoc[] +This is the inverse of the `<>` function. + +include::types/st_within.asciidoc[] +include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc index 692373f054d99..eec48894b5150 100644 --- a/docs/reference/esql/functions/st_x.asciidoc +++ b/docs/reference/esql/functions/st_x.asciidoc @@ -2,6 +2,8 @@ [[esql-st_x]] === `ST_X` +experimental::[] + *Syntax* [.text-center] diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc index dba9b3d450006..8fc7281e395d2 100644 --- a/docs/reference/esql/functions/st_y.asciidoc +++ b/docs/reference/esql/functions/st_y.asciidoc @@ -2,6 +2,8 @@ [[esql-st_y]] === `ST_Y` +experimental::[] + *Syntax* [.text-center] diff --git a/docs/reference/esql/functions/types/st_contains.asciidoc b/docs/reference/esql/functions/types/st_contains.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_contains.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/docs/reference/esql/functions/types/st_within.asciidoc b/docs/reference/esql/functions/types/st_within.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_within.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index ec181c552bf22..b097d7f2d077a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -78,6 +78,11 @@ public class CsvTestsDataLoader { "mapping-airport_city_boundaries.json", "airport_city_boundaries.csv" ); + private static final TestsDataset CARTESIAN_MULTIPOLYGONS = new TestsDataset( + "cartesian_multipolygons", + "mapping-cartesian_multipolygons.json", + "cartesian_multipolygons.csv" + ); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), @@ -96,7 +101,8 @@ public class CsvTestsDataLoader { Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB), - Map.entry(AIRPORT_CITY_BOUNDARIES.indexName, AIRPORT_CITY_BOUNDARIES) + Map.entry(AIRPORT_CITY_BOUNDARIES.indexName, AIRPORT_CITY_BOUNDARIES), + Map.entry(CARTESIAN_MULTIPOLYGONS.indexName, CARTESIAN_MULTIPOLYGONS) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv new file mode 100644 index 0000000000000..e65cdd29a22b8 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv @@ -0,0 +1,11 @@ +id:l, name:keyword, shape:cartesian_shape +0, Four squares, "MULTIPOLYGON(((0 0\, 1 0\, 1 1\, 0 1\, 0 0))\, ((2 0\, 3 0\, 3 1\, 2 1\, 2 0))\, ((2 2\, 3 2\, 3 3\, 2 3\, 2 2))\, ((0 2\, 1 2\, 1 3\, 0 3\, 0 2)))" +1, Bottom left, "POLYGON((0 0\, 1 0\, 1 1\, 0 1\, 0 0))" +2, Bottom right, "POLYGON((2 0\, 3 0\, 3 1\, 2 1\, 2 0))" +3, Top right, "POLYGON((2 2\, 3 2\, 3 3\, 2 3\, 2 2))" +4, Top left, "POLYGON((0 2\, 1 2\, 1 3\, 0 3\, 0 2))" +5, Four squares with holes, "MULTIPOLYGON(((0 0\, 1 0\, 1 1\, 0 1\, 0 0)\, 
(0.4 0.4\, 0.6 0.4\, 0.6 0.6\, 0.4 0.6\, 0.4 0.4))\, ((2 0\, 3 0\, 3 1\, 2 1\, 2 0)\, (2.4 0.4\, 2.6 0.4\, 2.6 0.6\, 2.4 0.6\, 2.4 0.4))\, ((2 2\, 3 2\, 3 3\, 2 3\, 2 2)\, (2.4 2.4\, 2.6 2.4\, 2.6 2.6\, 2.4 2.6\, 2.4 2.4))\, ((0 2\, 1 2\, 1 3\, 0 3\, 0 2)\, (0.4 2.4\, 0.6 2.4\, 0.6 2.6\, 0.4 2.6\, 0.4 2.4)))" +6, Bottom left with holes, "POLYGON((0 0\, 1 0\, 1 1\, 0 1\, 0 0)\, (0.4 0.4\, 0.6 0.4\, 0.6 0.6\, 0.4 0.6\, 0.4 0.4))" +7, Bottom right with holes, "POLYGON((2 0\, 3 0\, 3 1\, 2 1\, 2 0)\, (2.4 0.4\, 2.6 0.4\, 2.6 0.6\, 2.4 0.6\, 2.4 0.4))" +8, Top right with holes, "POLYGON((2 2\, 3 2\, 3 3\, 2 3\, 2 2)\, (2.4 2.4\, 2.6 2.4\, 2.6 2.6\, 2.4 2.6\, 2.4 2.4))" +9, Top left with holes, "POLYGON((0 2\, 1 2\, 1 3\, 0 3\, 0 2)\, (0.4 2.4\, 0.6 2.4\, 0.6 2.6\, 0.4 2.6\, 0.4 2.4))" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec new file mode 100644 index 0000000000000..c721d3c4899db --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -0,0 +1,201 @@ +#################################################################################################### +# The test data contains four square polygons, with and without holes, and multipolygon combinations of these +# We test this data against smaller, similar sized and larger query polygons with INTERSECTS, CONTAINS and WITHIN + +#################################################################################################### +# Test against a polygon similar in size to the Bottom Left polygon + +whereIntersectsSinglePolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + +whereContainsSinglePolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.001 0.001, 0.999 0.001, 0.999 0.999, 0.001 0.999, 0.001 0.001))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +; + +whereWithinSinglePolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + 
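+# Note the difference between the two result sets above: the "with holes" variants
+# are within the full query square (every point they retain lies inside it), but they
+# do not contain the slightly inset query square, because the hole removes interior
+# points that containment would require.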
+#################################################################################################### +# Test against a polygon smaller in size to the Bottom Left polygon + +whereIntersectsSmallerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + +whereContainsSmallerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +; + +whereWithinSmallerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +#################################################################################################### +# Test against a polygon similar in size to the entire test data + +whereIntersectsLargerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +whereContainsLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE 
ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +whereWithinLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +#################################################################################################### +# Test against a polygon larger than all test data + +whereIntersectsEvenLargerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +whereContainsEvenLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +whereWithinEvenLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 
4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json new file mode 100644 index 0000000000000..41fae0fbe4754 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "long" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "shape" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 9f9aeec7e2838..392d6f036111a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -64,7 +64,9 @@ sinh |"double sinh(angle:double|integer|long|unsigned_long)" split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true +st_contains |"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry contains the second geometry." 
| [false, false] | false | false st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." | [false, false] | false | false +st_within |"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry is within the second geometry." | [false, false] | false | false st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false @@ -168,7 +170,9 @@ double pi() "keyword split(string:keyword|text, delim:keyword|text)" "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" "double st_y(point:geo_point|cartesian_point)" "boolean starts_with(str:keyword|text, prefix:keyword|text)" @@ -223,5 +227,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -97 | 97 | 97 +99 | 99 | 99 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 495d0cbb8d7f0..57554c41a6dec 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -1,9 +1,9 @@ ############################################### # Tests for GEO_POINT type -# +############################################### convertFromStringQuantize -required_feature: esql.geo_point +required_feature: esql.spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); @@ -12,7 +12,9 @@ wkt:keyword |pt:geo_point POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006536) ; -convertFromString#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] 
+convertFromString +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-str[] ROW wkt = "POINT(42.97109630194 14.7552534413725)" | EVAL pt = TO_GEOPOINT(wkt) @@ -25,7 +27,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-str-result[] ; -convertFromStringArray#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] +convertFromStringArray +required_feature: esql.spatial_points_from_source + row wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geopoint(wkt); @@ -33,7 +37,9 @@ wkt:keyword ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] |[POINT(42.97109630194 14.7552534413725), POINT(75.8092915005895 22.727749187571)] ; -centroidFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromStringNested +required_feature: esql.st_centroid + row wkt = "POINT(42.97109629958868 14.7552534006536)" | STATS c = ST_CENTROID(TO_GEOPOINT(wkt)); @@ -41,7 +47,9 @@ c:geo_point POINT(42.97109629958868 14.7552534006536) ; -centroidFromString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromString1 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -51,7 +59,9 @@ c:geo_point POINT(42.97109629958868 14.7552534006536) ; -centroidFromString2#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromString2 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -61,7 +71,9 @@ c:geo_point POINT(59.390193899162114 18.741501288022846) ; -centroidFromString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromString3 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -99,7 +111,9 @@ POINT(42.97109629958868 14.7552534006536) | 42.97109629958868 | 14.755253400653 // end::st_x_y-result[] ; -simpleLoad#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] +simpleLoad +required_feature: esql.spatial_points_from_source + FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k @@ -131,7 +145,12 @@ c:long | x:double | y:double 19 | null | null ; -centroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +############################################### +# Tests for ST_CENTROID on GEO_POINT type + +centroidFromAirports +required_feature: esql.st_centroid + // tag::st_centroid-airports[] FROM airports | STATS centroid=ST_CENTROID(location) @@ -144,7 +163,9 @@ POINT(-0.030548143003023033 24.37553649504829) // end::st_centroid-airports-result[] ; -centroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsNested +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(TO_GEOPOINT(location)) ; @@ -153,7 +174,9 @@ centroid:geo_point POINT (-0.03054810272375508 24.37553651570554) ; -centroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCount +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() ; @@ -162,7 +185,9 
@@ centroid:geo_point | count:long POINT(-0.030548143003023033 24.37553649504829) | 891 ; -centroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGrouped +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | SORT scalerank DESC @@ -179,7 +204,9 @@ POINT(-26.976065734634176 42.907839377294295) | 24 | 3 POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; -centroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -189,7 +216,9 @@ centroid:geo_point | count:long POINT(83.27726172452623 28.99289782286029) | 33 ; -centroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCentroid +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -199,7 +228,9 @@ centroid:geo_point | count:long POINT (7.572387259169772 26.836561792945492) | 891 ; -centroidFromAirportsCountCityLocations#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountCityLocations +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() ; @@ -208,7 +239,9 @@ centroid:geo_point | count:long POINT (1.3965610809060276 24.127649406297987) | 891 ; -centroidFromAirportsCountGroupedCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCountry +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country | SORT count DESC, country ASC @@ -235,7 +268,9 @@ POINT (6.725663595240224 9.201645437966693) | 11 | Nigeria POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; -centroidFromAirportsFilteredCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFilteredCountry +required_feature: esql.st_centroid + FROM airports | WHERE country == "United States" | STATS centroid=ST_CENTROID(city_location), count=COUNT() @@ -245,7 +280,9 @@ centroid:geo_point | count:long POINT (-97.3333946136801 38.07953176370194) | 129 ; -centroidFromAirportsCountGroupedCountryCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCountryCentroid +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -255,7 +292,9 @@ centroid:geo_point | count:long POINT (17.55538044598613 18.185558743854063) | 891 ; -centroidFromAirportsCountryCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountryCount +required_feature: esql.st_centroid + FROM airports | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() ; @@ -264,7 +303,9 @@ airports:geo_point | cities:geo_point POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.127649406297987) | 891 ; -centroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFilteredAndSorted +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | SORT abbrev @@ -276,7 +317,9 @@ centroid:geo_point | count:long POINT(78.73736493755132 26.761841227998957) | 
12 ; -centroidFromAirportsAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterMvExpand +required_feature: esql.st_centroid + FROM airports | MV_EXPAND type | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -286,7 +329,9 @@ centroid:geo_point | count:long POINT(2.121611400672094 24.559172889205755) | 933 ; -centroidFromAirportsGroupedAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsGroupedAfterMvExpand +required_feature: esql.st_centroid + FROM airports | MV_EXPAND type | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank @@ -304,7 +349,9 @@ POINT(-26.976065734634176 42.907839377294295) | 24 | 3 POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; -centroidFromAirportsGroupedAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsGroupedAfterMvExpandFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | MV_EXPAND type @@ -315,7 +362,9 @@ centroid:geo_point | count:long | scalerank:i POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; -centroidFromAirportsAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterMvExpandFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | MV_EXPAND type @@ -326,7 +375,9 @@ centroid:geo_point | count:long POINT(83.16847535921261 28.79002037679311) | 40 ; -centroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterKeywordPredicateCountryUK +required_feature: esql.st_centroid + FROM airports | WHERE country == "United Kingdom" | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -336,7 +387,9 @@ centroid:geo_point | count:long POINT (-2.597342072712148 54.33551226578214) | 17 ; -centroidFromAirportsAfterIntersectsPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsPredicateCountryUK +required_feature: esql.st_intersects + FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -346,34 +399,70 @@ centroid:geo_point | count:long POINT (-2.597342072712148 54.33551226578214) | 17 ; -intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterContainsPredicateCountryUK +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +centroidFromAirportsAfterWithinPredicateCountryUK +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 
56.4625, 1.2305 60.8449))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK +required_feature: esql.st_intersects + FROM airports | WHERE country == "United Kingdom" | STATS centroid = ST_CENTROID(location), count=COUNT() | EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL centroid_in_iceland = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| KEEP centroid, count, centroid_in_uk, centroid_in_iceland +| EVAL centroid_within_uk = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_within_iceland = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| EVAL centroid_contains_uk = ST_CONTAINS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_contains_iceland = ST_CONTAINS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| KEEP centroid, count, centroid_in_uk, centroid_in_iceland, centroid_within_uk, centroid_within_iceland, centroid_contains_uk, centroid_contains_iceland ; -centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean -POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean | centroid_within_uk:boolean | centroid_within_iceland:boolean | centroid_contains_uk:boolean | centroid_contains_iceland:boolean +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false | true | false | false | false ; -centroidFromAirportsAfterIntersectsEvalExpression#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsEvalExpression +required_feature: esql.st_intersects + FROM airports | EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland +| EVAL within_uk = ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 
60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL within_iceland = ST_WITHIN(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland | SORT count ASC ; -centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean -POINT (-21.946634463965893 64.13187285885215) | 1 | false | true -POINT (-2.597342072712148 54.33551226578214) | 17 | true | false -POINT (0.04453958108176276 23.74658354606057) | 873 | false | false +centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean | within_uk:boolean | within_iceland:boolean +POINT (-21.946634463965893 64.13187285885215) | 1 | false | true | false | true +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false | true | false +POINT (0.04453958108176276 23.74658354606057) | 873 | false | false | false | false ; -centroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsPredicate +required_feature: esql.st_intersects + FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -383,7 +472,9 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; -centroidFromAirportsAfterIntersectsCompoundPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsCompoundPredicate +required_feature: esql.st_intersects + FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -393,29 +484,38 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; -pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS on GEO_POINT type + +pointIntersectsLiteralPolygon +required_feature: esql.st_intersects + +// tag::st_intersects-airports[] FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +// end::st_intersects-airports[] ; +// tag::st_intersects-airports-result[] abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_intersects-airports-result[] ; -pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] -// tag::st_intersects-airports[] +pointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) -// end::st_intersects-airports[] ; -// tag::st_intersects-airports-results[] abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid 
-// end::st_intersects-airports-results[] ; -literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -427,7 +527,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -439,7 +541,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonOneRow +required_feature: esql.st_intersects + ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -447,7 +551,9 @@ intersects:boolean true ; -cityInCityBoundary#[skip:-8.13.99, reason:st_intersects added in 8.14] +cityInCityBoundary +required_feature: esql.st_intersects + FROM airport_city_boundaries | EVAL in_city = ST_INTERSECTS(city_location, city_boundary) | STATS count=COUNT(*) BY in_city @@ -461,7 +567,9 @@ cardinality:k | in_city:boolean "many" | true ; -cityNotInCityBoundaryBiggest#[skip:-8.13.99, reason:st_intersects added in 8.14] +cityNotInCityBoundaryBiggest +required_feature: esql.st_intersects + FROM airport_city_boundaries | WHERE NOT ST_INTERSECTS(city_location, city_boundary) | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) @@ -474,7 +582,9 @@ abbrev:keyword | airport:text | city:keyword | city_location:geo_poi SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.2533) | 598 | POLYGON((109.1802 18.4609, 109.2304 18.4483, 109.2311 18.4261, 109.2696 18.411, 109.2602 18.3581, 109.2273 18.348, 109.2286 18.2638, 109.2842 18.2665, 109.3518 18.2166, 109.4508 18.1936, 109.4895 18.2281, 109.5137 18.2283, 109.4914 18.2781, 109.5041 18.2948, 109.4809 18.3034, 109.5029 18.3422, 109.5249 18.3375, 109.4993 18.3632, 109.535 18.4007, 109.5104 18.4374, 109.5231 18.4474, 109.5321 18.53, 109.4992 18.5568, 109.4192 18.5646, 109.4029 18.6302, 109.3286 18.5772, 109.309 18.5191, 109.2913 18.5141, 109.2434 18.5607, 109.2022 18.5572, 109.1815 18.5163, 109.1908 18.4711, 109.1802 18.4609))) ; -airportCityLocationPointIntersection#[skip:-8.13.99, reason:st_intersects added in 8.14] +airportCityLocationPointIntersection +required_feature: esql.st_intersects + FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) ; @@ -483,7 +593,9 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid ; -airportCityLocationPointIntersectionCentroid#[skip:-8.13.99, reason:st_intersects added in 8.14] +airportCityLocationPointIntersectionCentroid +required_feature: esql.st_intersects + FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() @@ -493,7 +605,192 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; -geoPointEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] +############################################### +# Tests for ST_CONTAINS on GEO_POINT type + 
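+# A note on the relationship exercised below: ST_CONTAINS and ST_WITHIN are
+# duals, so ST_CONTAINS(a, b) is expected to agree with ST_WITHIN(b, a) for any
+# pair of geometries a and b. The sketch below (the test name
+# containsWithinDuality is hypothetical, not part of this patch) combines two
+# one-row cases that appear later in this section; the polygon, the point and
+# the expected booleans are taken verbatim from those cases.
+
+containsWithinDuality
+required_feature: esql.st_contains_within
+
+ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")),
+    within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"))
+;
+
+contains:boolean | within:boolean
+true | true
+;
+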
+literalPolygonContainsLiteralPoint +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointDoesNotContainLiteralPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_CONTAINS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +; + +literalPolygonContainsLiteralPointOneRow +required_feature: esql.st_contains_within + +ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +contains:boolean +true +; + +literalPointDoesNotContainLiteralPolygonOneRow +required_feature: esql.st_contains_within + +ROW contains = ST_CONTAINS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +contains:boolean +false +; + +pointContainsLiteralPolygon +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +; + +pointContainedInLiteralPolygon +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +; + +airportCityLocationPointContains +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_CONTAINS(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointContainsCentroid +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_CONTAINS(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + +############################################### +# Tests for ST_WITHIN on GEO_POINT type + +literalPolygonNotWithinLiteralPoint +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +; + +literalPointWithinLiteralPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_WITHIN(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPolygonNotWithinLiteralPointOneRow +required_feature: esql.st_contains_within + +ROW within = ST_WITHIN(TO_GEOSHAPE("POLYGON((0 
-1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +within:boolean +false +; + +literalPointWithinLiteralPolygonOneRow +required_feature: esql.st_contains_within + +ROW within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +within:boolean +true +; + +pointWithinLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_within-airports[] +FROM airports +| WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +// end::st_within-airports[] +; + +// tag::st_within-airports-results[] +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_within-airports-results[] +; + +airportCityLocationPointWithin +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_WITHIN(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointWithinCentroid +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_WITHIN(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + +############################################### +# Tests for Equality and casting with GEO_POINT + +geoPointEquals +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -508,7 +805,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-equals-result[] ; -geoPointNotEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] +geoPointNotEquals +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-not-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -523,7 +822,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-not-equals-result[] ; -convertFromStringParseError#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] +convertFromStringParseError +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-str-parse-error[] row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt @@ -547,9 +848,11 @@ wkt:keyword |pt:geo_point ############################################### # Tests for CARTESIAN_POINT type -# +############################################### + +convertCartesianFromString +required_feature: esql.spatial_points_from_source -convertCartesianFromString#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] // tag::to_cartesianpoint-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -564,7 +867,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-str-result[] ; -convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] +convertCartesianFromStringArray +required_feature: esql.spatial_points_from_source + row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | 
eval pt = to_cartesianpoint(wkt); @@ -572,7 +877,9 @@ wkt:keyword |pt:cartesian_point ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] |[POINT(4297.11 -1475.53), POINT(7580.93 2272.77)] ; -centroidCartesianFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidCartesianFromStringNested +required_feature: esql.st_centroid + row wkt = "POINT(4297.10986328125 -1475.530029296875)" | STATS c = ST_CENTROID(TO_CARTESIANPOINT(wkt)); @@ -580,7 +887,9 @@ c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; -centroidFromCartesianString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString1 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -590,7 +899,9 @@ c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; -centroidFromCartesianString2#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString2 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -600,7 +911,9 @@ c:cartesian_point POINT(5939.02001953125 398.6199951171875) ; -centroidFromCartesianString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString3 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -621,7 +934,9 @@ point:cartesian_point | x:double | y:double POINT(4297.10986328125 -1475.530029296875) | 4297.10986328125 | -1475.530029296875 ; -simpleCartesianLoad#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] +simpleCartesianLoad +required_feature: esql.spatial_points_from_source + FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k @@ -635,7 +950,12 @@ WIIT | POINT (11708145.489503577 -584415.9142832769) | Radin Inten II ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l | 9 | mid ; -cartesianCentroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +############################################### +# Tests for ST_CENTROID on CARTESIAN_POINT type + +cartesianCentroidFromAirports +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location); @@ -643,7 +963,9 @@ centroid:cartesian_point POINT(-266681.67563861894 3053301.5120195406) ; -cartesianCentroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsNested +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(TO_CARTESIANPOINT(location)); @@ -651,7 +973,9 @@ centroid:cartesian_point POINT (-266681.66530554957 3053301.506061676) ; -cartesianCentroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCount +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location), count=COUNT() ; @@ -660,7 +984,9 @@ centroid:cartesian_point | count:long POINT(-266681.67563861894 3053301.5120195406) | 849 ; -cartesianCentroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCountGrouped +required_feature: esql.st_centroid + FROM airports_web | STATS 
centroid=ST_CENTROID(location), count=COUNT() BY scalerank | SORT scalerank DESC @@ -677,7 +1003,9 @@ POINT(-3002961.9270833335 5451641.91796875) | 24 | 3 POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; -cartesianCentroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsFiltered +required_feature: esql.st_centroid + FROM airports_web | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -687,7 +1015,9 @@ centroid:cartesian_point | count:long POINT(9289013.153846154 3615537.0533353365) | 26 ; -cartesianCentroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsFilteredAndSorted +required_feature: esql.st_centroid + FROM airports_web | WHERE scalerank == 9 | SORT abbrev @@ -699,7 +1029,9 @@ centroid:cartesian_point | count:long POINT(9003597.4375 3429344.0078125) | 8 ; -cartesianCentroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCountGroupedCentroid +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -709,7 +1041,12 @@ centroid:cartesian_point | count:long POINT (726480.0130685265 3359566.331716279) | 849 ; -cartesianCentroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterIntersectsPredicate +required_feature: esql.st_intersects + FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -719,7 +1056,9 @@ centroid:cartesian_point | count:long POINT (4783520.5 1661010.0) | 1 ; -cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +cartesianPointIntersectsPolygon +required_feature: esql.st_intersects + FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) ; @@ -728,7 +1067,9 @@ abbrev:keyword | location:cartesian_point | name:text | HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid ; -literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPointIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -740,7 +1081,126 @@ wkt:keyword | pt:cartesian_point "POINT(1 -1)" | POINT(1 -1) ; -cartesianPointEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] +############################################### +# Tests for ST_CONTAINS on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +FROM 
airports_web +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (NaN NaN) | 0 +; + +cartesianPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +; + +literalCartesianPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_CONTAINS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +; + +############################################### +# Tests for ST_WITHIN on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterWithinPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointWithinPolygon +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPointWithinPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_WITHIN(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +############################################### +# Tests for Equality and casting with CARTESIAN_POINT + +cartesianPointEquals +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -755,7 +1215,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-equals-result[] ; -cartesianPointNotEquals#[skip:-8.12.99, reason:spatial
type cartesian_point improved in 8.13] +cartesianPointNotEquals +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-not-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -770,7 +1232,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-not-equals-result[] ; -convertCartesianFromStringParseError#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] +convertCartesianFromStringParseError +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-str-parse-error[] row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 69e56c7efe55d..f010ed13370e0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -2,7 +2,9 @@ # Tests for GEO_SHAPE type # -convertFromString#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromString +required_feature: esql.spatial_shapes + // tag::to_geoshape-str[] ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" | EVAL geom = TO_GEOSHAPE(wkt) @@ -15,7 +17,9 @@ wkt:keyword | geom:geo_shape // end::to_geoshape-str-result[] ; -convertFromStringArray#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringArray +required_feature: esql.spatial_shapes + row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geoshape(wkt); @@ -23,7 +27,9 @@ wkt:keyword ["POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))", "POINT(75.8092915005895 22.727749187571)"] |[POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10)), POINT(75.8092915005895 22.727749187571)] ; -convertFromStringViaPoint#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringViaPoint +required_feature: esql.spatial_shapes + ROW wkt = "POINT (30 10)" | EVAL point = TO_GEOPOINT(wkt) | EVAL shape = TO_GEOSHAPE(point) @@ -34,14 +40,18 @@ wkt:keyword | point:geo_point | shape:geo_shape ; # need to work out how to upload WKT -simpleLoad#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoad +required_feature: esql.spatial_shapes + FROM countries_bbox | WHERE id == "ISL"; id:keyword| name:keyword| shape:geo_shape ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) ; -simpleLoadPointsAsShapes#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoadPointsAsShapes +required_feature: esql.spatial_shapes + FROM airports | WHERE abbrev == "CPH" OR abbrev == "VLC" | SORT abbrev @@ -66,7 +76,12 @@ abbrev:keyword | region:text | city_location:geo_point | airport:tex CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen | 265 ; -pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS with GEO_SHAPE + +pointIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM airports | EVAL location = TO_GEOSHAPE(location) | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -77,7 +92,9 @@ abbrev:keyword | name:text | location:geo_shape | count HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 
14.8022) ; -polygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +polygonIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM airport_city_boundaries | WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) | KEEP abbrev, airport, region, city, city_location @@ -88,7 +105,9 @@ abbrev:keyword | airport:text | region:text | city:keyword | city_locati SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) ; -pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +pointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + FROM airports | EVAL location = TO_GEOSHAPE(location) | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -99,7 +118,9 @@ abbrev:keyword | name:text | location:geo_shape | count HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) ; -literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -111,7 +132,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -123,7 +146,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointAsShapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointAsShapeIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOSHAPE(wkt) @@ -135,7 +160,9 @@ wkt:keyword | pt:geo_shape "POINT(1 -1)" | POINT(1 -1) ; -literalPointAsShapeIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointAsShapeIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOSHAPE(wkt) @@ -147,7 +174,9 @@ wkt:keyword | pt:geo_shape "POINT(1 -1)" | POINT(1 -1) ; -shapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +shapeIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM countries_bbox | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) | SORT id DESC @@ -159,7 +188,9 @@ SWZ | Swaziland | BBOX(30.798336, 32.133400, -25.728336, -27.316391) LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) ; -literalPolygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPolygonIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] | EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") | MV_EXPAND wkt @@ -172,7 +203,9 @@ wkt:keyword | shape:geo_shape "POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))" | POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60)) | POLYGON((-15 64, -10 64, -10 66, -15 66, 
-15 64)) ; -literalPolygonIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPolygonIntersectsLiteralPolygonOneRow +required_feature: esql.st_intersects + ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) ; @@ -180,7 +213,49 @@ intersects:boolean true ; -geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +############################################### +# Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE + +polygonContainsLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_contains-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_contains-airport_city_boundaries[] +| LIMIT 1 +; + +// tag::st_contains-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +// end::st_contains-airport_city_boundaries-result[] +; + +polygonWithinLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_within-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_within-airport_city_boundaries[] +| LIMIT 1 +; + +// tag::st_within-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +// end::st_within-airport_city_boundaries-result[] +; + +############################################### +# Tests for Equality and casting with GEO_SHAPE + +geo_shapeEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -191,7 +266,9 @@ wkt:keyword |pt:geo_shape "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" |POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) ; -geo_shapeNotEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +geo_shapeNotEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -202,7 +279,9 @@ wkt:keyword |pt:geo_shape "POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) ; -convertFromStringParseError#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringParseError +required_feature: esql.spatial_shapes + row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt | eval pt = to_geoshape(wkt) @@ -222,7 +301,9 @@ wkt:keyword |pt:geo_shape # Tests for CARTESIAN_SHAPE type # -convertCartesianShapeFromString#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianShapeFromString +required_feature: esql.spatial_shapes + // tag::to_cartesianshape-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 
1118889.97))"] | MV_EXPAND wkt @@ -237,7 +318,9 @@ wkt:keyword |geom:cartesian_shape // end::to_cartesianshape-str-result[] ; -convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_shape only added in 8.13] +convertCartesianFromStringArray +required_feature: esql.spatial_shapes + row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianshape(wkt); @@ -245,7 +328,9 @@ wkt:keyword ["POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] |[POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97)), POINT(7580.93 2272.77)] ; -convertCartesianFromStringViaPoint#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianFromStringViaPoint +required_feature: esql.spatial_shapes + ROW wkt = "POINT (3010 -1010)" | EVAL point = TO_CARTESIANPOINT(wkt) | EVAL shape = TO_CARTESIANSHAPE(point) @@ -256,14 +341,18 @@ wkt:keyword | point:cartesian_point | shape:cartesian_shape ; # need to work out how to upload WKT -simpleCartesianShapeLoad#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +simpleCartesianShapeLoad +required_feature: esql.spatial_shapes + FROM countries_bbox_web | WHERE id == "ISL"; id:keyword| name:keyword|shape:cartesian_shape ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9196525.03584683) ; -simpleLoadCartesianPointsAsShapes#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +simpleLoadCartesianPointsAsShapes +required_feature: esql.spatial_shapes + FROM airports_web | WHERE abbrev == "CPH" OR abbrev == "VLC" | SORT abbrev @@ -275,7 +364,12 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart "VLC" | "Valencia" | 8 | "mid" | POINT(-52706.98819688343 4792315.469321795) ; -cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS with CARTESIAN_SHAPE + +cartesianPointIntersectsPolygon +required_feature: esql.st_intersects + FROM airports_web | EVAL location = TO_CARTESIANSHAPE(location) | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -286,7 +380,9 @@ abbrev:keyword | name:text | location:cartesian_shape | HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | 9 | mid ; -literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPointIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANSHAPE(wkt) @@ -298,7 +394,9 @@ wkt:keyword | pt:cartesian_shape "POINT(1 -1)" | POINT(1 -1) ; -cartesianShapeIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +cartesianShapeIntersectsPolygon +required_feature: esql.st_intersects + FROM countries_bbox_web | WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) | SORT id DESC @@ -310,7 +408,9 @@ SWZ | Swaziland | BBOX(3428455.080322901, 3577073.7249586442, -2965472 LSO | Lesotho | BBOX(3007181.718244638, 
3278977.271857335, -3321117.2692412077, -3587446.106149188) ; -literalCartesianPolygonIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPolygonIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] | MV_EXPAND wkt | EVAL shape = TO_CARTESIANSHAPE(wkt) @@ -322,7 +422,41 @@ wkt:keyword | shape:ca "POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) ; -cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +############################################### +# Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE + +cartesianShapeContainsPolygon +required_feature: esql.st_contains_within + +FROM countries_bbox_web +| WHERE ST_CONTAINS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +; + +cartesianShapeWithinPolygon +required_feature: esql.st_contains_within + +FROM countries_bbox_web +| WHERE ST_WITHIN(shape, TO_CARTESIANSHAPE("POLYGON((1800000 -2500000, 4300000 -2500000, 4300000 -6000000, 1800000 -6000000, 1800000 -2500000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +SWZ | Swaziland | BBOX(3428455.080322901, 3577073.7249586442, -2965472.9128583763, -3163056.5390926218) +LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117.2692412077, -3587446.106149188) +; + +############################################### +# Tests for Equality and casting with CARTESIAN_SHAPE + +cartesianshapeEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = to_cartesianshape(wkt) @@ -333,7 +467,9 @@ wkt:keyword |pt:cartesian_shape "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) ; -cartesianShapeNotEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +cartesianShapeNotEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = to_cartesianshape(wkt) @@ -344,7 +480,9 @@ wkt:keyword |pt:cartesian_shape "POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) ; -convertCartesianShapeFromStringParseError#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianShapeFromStringParseError +required_feature: esql.spatial_shapes + row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt | eval pt = to_cartesianshape(wkt) diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..ce7e2889fc298 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } 
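+        // A position whose evaluation throws IllegalArgumentException is recorded
+        // as a warning and null is appended for that position, so a single bad
+        // value does not fail the whole page.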
+ } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..5b536707e8a0f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. 
+ * This class is generated. Do not edit it. + */ +public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..55dfbede4c003 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
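+ * <p>
+ * Note: the right-hand geometry is a constant, held by the factory as a pre-built Lucene {@link Component2D},
+ * so only the left-hand geometry is decoded from its {@code BytesRef} form per row; rows that fail to decode
+ * register a warning and emit {@code null}.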
+ */ +public final class SpatialContainsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndConstantEvaluator[" + "leftValue=" + 
leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..d2456597b5761 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return 
"SpatialContainsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..957800fb3c38e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..348c343f0b005 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
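+ * <p>
+ * Combines a doc-values point (encoded {@code long}) on the left with a per-row source geometry
+ * ({@code BytesRef}) on the right; rows that fail to process register a warning and emit {@code null}.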
+ */ +public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + 
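+    // Both input evaluators are released together; close is not expected to throw.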
} + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..186eacc680c2c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
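+ * <p>
+ * Mirrors the cartesian source-and-constant variant for geo geometries: the constant is a pre-built
+ * {@link Component2D} held by the factory, and only the left-hand geometry is decoded per row.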
+ */ +public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + 
} + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..6bff91629f74c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if 
(leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..6deb7133fcf13 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + 
public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..d2470583c3a7c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
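+ * <p>
+ * The point-versus-point doc-values comparison cannot fail per row, so the vector overload builds a
+ * {@link BooleanVector} directly and {@code eval(Page)} wraps it with {@code asBlock()}.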
+ */ +public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + 
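+    // Factories for both inputs; get(DriverContext) builds a fresh evaluator instance per driver.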
+ private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..45c8f60d12b03 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
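+ * <p>
+ * Left-hand geometries are decoded from source per row and tested against the constant
+ * {@link Component2D}; decode failures register a warning and the row becomes {@code null}.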
+ */ +public final class SpatialWithinCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", 
rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..958ac825eeb0b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
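+ * <p>
+ * Both sides are per-row source geometries; scratch {@code BytesRef} instances are reused across
+ * positions to avoid allocating per row.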
+ */ +public final class SpatialWithinCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndSourceEvaluator[" + 
"leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..680cf7b38445b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..624b9243a62c4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
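+ * <p>
+ * Pairs doc-values geo_points (encoded longs) on the left with per-row WKB geometries
+ * (BytesRefs) on the right, decoding the right-hand geometry at every position.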
+ */ +public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + 
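+  /**
+   * Planner-side factory: it captures the child evaluator factories and defers
+   * construction to {@link #get}, so each driver builds an evaluator bound to its
+   * own {@link DriverContext}.
+   */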
static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..3647594337c57 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
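+ * <p>
+ * The left argument is a geometry column read as WKB BytesRefs; the right argument is
+ * a constant Component2D. IllegalArgumentException and IOException thrown while
+ * decoding are downgraded to warnings and the position becomes null.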
+ */ +public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff 
--git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..8794c3d0488b3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { 
+ warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 6f1f01bbe632c..9f0976e0045d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -79,7 +79,9 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains;
 import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects;
+import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin;
 import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX;
 import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat;
@@ -182,7 +184,9 @@ private FunctionDefinition[][] functions() {
             // spatial
             new FunctionDefinition[] {
                 def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"),
+                def(SpatialContains.class, SpatialContains::new, "st_contains"),
                 def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"),
+                def(SpatialWithin.class, SpatialWithin::new, "st_within"),
                 def(StX.class, StX::new, "st_x"),
                 def(StY.class, StY::new, "st_y") },
             // conditional
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java
new file mode 100644
index 0000000000000..e7b3292b3714d
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial;
+
+import org.apache.lucene.geo.Component2D;
+import org.apache.lucene.geo.LatLonGeometry;
+import org.apache.lucene.geo.Rectangle;
+import org.apache.lucene.geo.XYGeometry;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This utility class provides access to protected methods in Lucene using alternative APIs.
+ * For example, the 'create' method returns the original Component2D array, instead of a Component2D containing
+ * a component tree of potentially multiple components. This is particularly useful for algorithms that need to
+ * operate on each component individually.
+ */
+public class LuceneComponent2DUtils {
+    /**
+     * This method is based on LatLonGeometry.create, but returns an array of Component2D objects for multi-component geometries.
+     */
+    public static Component2D[] createLatLonComponents(LatLonGeometry... latLonGeometries) {
+        if (latLonGeometries == null) {
+            throw new IllegalArgumentException("geometries must not be null");
+        } else if (latLonGeometries.length == 0) {
+            throw new IllegalArgumentException("geometries must not be empty");
+        } else {
+            final List<Component2D> components = new ArrayList<>(latLonGeometries.length);
+
+            for (int i = 0; i < latLonGeometries.length; ++i) {
+                if (latLonGeometries[i] == null) {
+                    throw new IllegalArgumentException("geometries[" + i + "] must not be null");
+                }
+
+                if (latLonGeometries[i] instanceof Rectangle rectangle && rectangle.crossesDateline()) {
+                    addRectangle(components, rectangle);
+                } else {
+                    components.add(LatLonGeometry.create(latLonGeometries[i]));
+                }
+            }
+
+            return components.toArray(new Component2D[0]);
+        }
+    }
+
+    private static void addRectangle(List<Component2D> components, Rectangle rectangle) {
+        double minLongitude = rectangle.minLon;
+        boolean crossesDateline = rectangle.minLon > rectangle.maxLon;
+        if (minLongitude == 180.0 && crossesDateline) {
+            minLongitude = -180.0;
+            crossesDateline = false;
+        }
+        if (crossesDateline) {
+            Rectangle left = new Rectangle(rectangle.minLat, rectangle.maxLat, -180.0, rectangle.maxLon);
+            Rectangle right = new Rectangle(rectangle.minLat, rectangle.maxLat, minLongitude, 180.0);
+            components.add(LatLonGeometry.create(left));
+            components.add(LatLonGeometry.create(right));
+        } else {
+            components.add(LatLonGeometry.create(rectangle));
+        }
+    }
+
+    /**
+     * This method is based on XYGeometry.create, but returns an array of Component2D objects for multi-component geometries.
+     */
+    public static Component2D[] createXYComponents(XYGeometry... xyGeometries) {
+        if (xyGeometries == null) {
+            throw new IllegalArgumentException("geometries must not be null");
+        } else if (xyGeometries.length == 0) {
+            throw new IllegalArgumentException("geometries must not be empty");
+        } else {
+            Component2D[] components = new Component2D[xyGeometries.length];
+
+            for (int i = 0; i < xyGeometries.length; ++i) {
+                if (xyGeometries[i] == null) {
+                    throw new IllegalArgumentException("geometries[" + i + "] must not be null");
+                }
+
+                components[i] = XYGeometry.create(xyGeometries[i]);
+            }
+
+            return components;
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java
new file mode 100644
index 0000000000000..8bf33a7e3dc61
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java
@@ -0,0 +1,287 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2Ds; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.makeGeometryFromLiteral; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_CONTAINS. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_CONTAINS and QueryRelation.CONTAINS. + */ +public class SpatialContains extends SpatialRelatesFunction { + // public for test access with reflection + public static final SpatialRelationsContains GEO = new SpatialRelationsContains( + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Contains") + ); + // public for test access with reflection + public static final SpatialRelationsContains CARTESIAN = new SpatialRelationsContains( + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Contains") + ); + + /** + * We override the normal behaviour for CONTAINS because we need to test each component separately. + * This applies to multi-component geometries (MultiPolygon, etc.) as well as polygons that cross the dateline. 
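+     * For example, a MultiPolygon is only contained if every one of its polygons is contained,
+     * and a dateline-crossing rectangle is first split into two components that are tested separately.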
+     */
+    static final class SpatialRelationsContains extends SpatialRelations {
+        SpatialRelationsContains(SpatialCoordinateTypes spatialCoordinateType, CoordinateEncoder encoder, ShapeIndexer shapeIndexer) {
+            super(ShapeField.QueryRelation.CONTAINS, spatialCoordinateType, encoder, shapeIndexer);
+        }
+
+        @Override
+        protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws IOException {
+            Component2D[] rightComponent2Ds = asLuceneComponent2Ds(crsType, fromBytesRef(right));
+            return geometryRelatesGeometries(left, rightComponent2Ds);
+        }
+
+        private boolean geometryRelatesGeometries(BytesRef left, Component2D[] rightComponent2Ds) throws IOException {
+            Geometry leftGeom = fromBytesRef(left);
+            GeometryDocValueReader leftDocValueReader = asGeometryDocValueReader(coordinateEncoder, shapeIndexer, leftGeom);
+            return geometryRelatesGeometries(leftDocValueReader, rightComponent2Ds);
+        }
+
+        private boolean geometryRelatesGeometries(GeometryDocValueReader leftDocValueReader, Component2D[] rightComponent2Ds)
+            throws IOException {
+            for (Component2D rightComponent2D : rightComponent2Ds) {
+                // Every component of the right geometry must be contained within the left geometry for this to pass
+                if (geometryRelatesGeometry(leftDocValueReader, rightComponent2D) == false) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+
+    @FunctionInfo(
+        returnType = { "boolean" },
+        description = "Returns whether the first geometry contains the second geometry.",
+        note = "The second parameter must also have the same coordinate system as the first. "
+            + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.",
+        examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries")
+    )
+    public SpatialContains(
+        Source source,
+        @Param(
+            name = "geomA",
+            type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" },
+            description = "Geometry column name or variable of geometry type"
+        ) Expression left,
+        @Param(
+            name = "geomB",
+            type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" },
+            description = "Geometry column name or variable of geometry type"
+        ) Expression right
+    ) {
+        this(source, left, right, false, false);
+    }
+
+    SpatialContains(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) {
+        super(source, left, right, leftDocValues, rightDocValues);
+    }
+
+    @Override
+    public ShapeField.QueryRelation queryRelation() {
+        return ShapeField.QueryRelation.CONTAINS;
+    }
+
+    @Override
+    public SpatialContains withDocValues(Set<FieldAttribute> attributes) {
+        // Only update the docValues flags if the field is found in the attributes
+        boolean leftDV = leftDocValues || foundField(left(), attributes);
+        boolean rightDV = rightDocValues || foundField(right(), attributes);
+        return new SpatialContains(source(), left(), right(), leftDV, rightDV);
+    }
+
+    @Override
+    protected SpatialContains replaceChildren(Expression newLeft, Expression newRight) {
+        return new SpatialContains(source(), newLeft, newRight, leftDocValues, rightDocValues);
+    }
+
+    @Override
+    protected NodeInfo<SpatialContains> info() {
+        return NodeInfo.create(this, SpatialContains::new, left(), right());
+    }
+
+    @Override
+    public Object fold() {
+        try {
+            GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left());
+            Geometry rightGeom = makeGeometryFromLiteral(right());
+            Component2D[] components = asLuceneComponent2Ds(crsType, rightGeom);
+            return (crsType == SpatialCrsType.GEO)
+                ?
GEO.geometryRelatesGeometries(docValueReader, components)
+                : CARTESIAN.geometryRelatesGeometries(docValueReader, components);
+        } catch (IOException e) {
+            throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e);
+        }
+    }
+
+    @Override
+    Map<SpatialEvaluatorFactory.SpatialEvaluatorKey, SpatialEvaluatorFactory<?, ?>> evaluatorRules() {
+        return evaluatorMap;
+    }
+
+    /**
+     * To keep the number of evaluators to a minimum, we swap the arguments to get the WITHIN relation.
+     * This also makes other optimizations, like lucene-pushdown, simpler to develop.
+     */
+    @Override
+    public SpatialRelatesFunction surrogate() {
+        if (left().foldable() && right().foldable() == false) {
+            return new SpatialWithin(source(), right(), left(), rightDocValues, leftDocValues);
+        }
+        return this;
+    }
+
+    private static final Map<SpatialEvaluatorFactory.SpatialEvaluatorKey, SpatialEvaluatorFactory<?, ?>> evaluatorMap = new HashMap<>();
+
+    static {
+        // Support geo_point and geo_shape from source and constant combinations
+        for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) {
+            for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) {
+                evaluatorMap.put(
+                    SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType),
+                    new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialContainsGeoSourceAndSourceEvaluator.Factory::new)
+                );
+                evaluatorMap.put(
+                    SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType),
+                    new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(
+                        SpatialContainsGeoSourceAndConstantEvaluator.Factory::new
+                    )
+                );
+                if (EsqlDataTypes.isSpatialPoint(spatialType)) {
+                    evaluatorMap.put(
+                        SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(),
+                        new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(
+                            SpatialContainsGeoPointDocValuesAndSourceEvaluator.Factory::new
+                        )
+                    );
+                    evaluatorMap.put(
+                        SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(),
+                        new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(
+                            SpatialContainsGeoPointDocValuesAndConstantEvaluator.Factory::new
+                        )
+                    );
+                }
+            }
+        }
+
+        // Support cartesian_point and cartesian_shape from source and constant combinations
+        for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) {
+            for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) {
+                evaluatorMap.put(
+                    SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType),
+                    new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(
+                        SpatialContainsCartesianSourceAndSourceEvaluator.Factory::new
+                    )
+                );
+                evaluatorMap.put(
+                    SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType),
+                    new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(
+                        SpatialContainsCartesianSourceAndConstantEvaluator.Factory::new
+                    )
+                );
+                if (EsqlDataTypes.isSpatialPoint(spatialType)) {
+                    evaluatorMap.put(
+                        SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(),
+                        new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(
+                            SpatialContainsCartesianPointDocValuesAndSourceEvaluator.Factory::new
+                        )
+                    );
+                    evaluatorMap.put(
+                        SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(),
+                        new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(
+                            SpatialContainsCartesianPointDocValuesAndConstantEvaluator.Factory::new
+                        )
+                    );
+                }
+            }
+        }
+    }
+
+    @Evaluator(extraName = "GeoSourceAndConstant",
warnExceptions = { IllegalArgumentException.class, IOException.class })
+    static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException {
+        return GEO.geometryRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class })
+    static boolean processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException {
+        return GEO.geometryRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class })
+    static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) {
+        return GEO.pointRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class })
+    static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) {
+        Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue);
+        return GEO.pointRelatesGeometry(leftValue, geometry);
+    }
+
+    @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class })
+    static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException {
+        return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class })
+    static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException {
+        return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class })
+    static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) {
+        return CARTESIAN.pointRelatesGeometry(leftValue, rightValue);
+    }
+
+    @Evaluator(extraName = "CartesianPointDocValuesAndSource")
+    static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) {
+        Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue);
+        return CARTESIAN.pointRelatesGeometry(leftValue, geometry);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
index ccdd68e1806c1..cea7d926c3e39 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java
@@ -38,7 +38,7 @@ public abstract EvalOperator.ExpressionEvaluator.Factory get(
         Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
     );
 
-    public static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator(
+    static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator(
         SpatialSourceSupplier s,
         Map<SpatialEvaluatorKey, SpatialEvaluatorFactory<?, ?>> evaluatorRules,
         Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
@@ -170,7 +170,7 @@ public EvalOperator.ExpressionEvaluator.Factory get(
 
     protected record SpatialEvaluatorFieldKey(DataType dataType, boolean isConstant) {}
 
-    protected record SpatialEvaluatorKey(
+    record SpatialEvaluatorKey(
SpatialRelatesFunction.SpatialCrsType crsType, boolean leftDocValues, boolean rightDocValues, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 831c041caaa94..93965b0d3e9be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -18,6 +18,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -40,21 +41,35 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +/** + * This is the primary class for supporting the function ST_INTERSECTS. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_INTERSECTS and QueryRelation.INTERSECTS. + */ public class SpatialIntersects extends SpatialRelatesFunction { - protected static final SpatialRelations GEO = new SpatialRelations( + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( ShapeField.QueryRelation.INTERSECTS, SpatialCoordinateTypes.GEO, CoordinateEncoder.GEO, new GeoShapeIndexer(Orientation.CCW, "ST_Intersects") ); - protected static final SpatialRelations CARTESIAN = new SpatialRelations( + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( ShapeField.QueryRelation.INTERSECTS, SpatialCoordinateTypes.CARTESIAN, CoordinateEncoder.CARTESIAN, new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo(returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.") + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the two geometries or geometry columns intersect.", + note = "The second parameter must also have the same coordinate system as the first. 
" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", + examples = @Example(file = "spatial", tag = "st_intersects-airports") + ) public SpatialIntersects( Source source, @Param( @@ -112,7 +127,7 @@ public Object fold() { } @Override - protected Map> evaluatorRules() { + Map> evaluatorRules() { return evaluatorMap; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index cdd21682d0db7..09938558b6cce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -35,6 +35,7 @@ import java.util.function.Function; import java.util.function.Predicate; +import static org.apache.lucene.document.ShapeField.QueryRelation.CONTAINS; import static org.apache.lucene.document.ShapeField.QueryRelation.DISJOINT; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; @@ -203,7 +204,14 @@ public boolean rightDocValues() { /** * Produce a map of rules defining combinations of incoming types to the evaluator factory that should be used. */ - protected abstract Map> evaluatorRules(); + abstract Map> evaluatorRules(); + + /** + * Some spatial functions can replace themselves with alternatives that are more efficient for certain cases. + */ + public SpatialRelatesFunction surrogate() { + return this; + } @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator( @@ -262,7 +270,7 @@ protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws return geometryRelatesGeometry(left, rightComponent2D); } - private Geometry fromBytesRef(BytesRef bytesRef) { + protected Geometry fromBytesRef(BytesRef bytesRef) { return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); } @@ -286,12 +294,16 @@ protected boolean pointRelatesGeometry(long encoded, Geometry geometry) { protected boolean pointRelatesGeometry(long encoded, Component2D component2D) { // This code path exists for doc-values points, and we could consider re-using the point class to reduce garbage creation Point point = spatialCoordinateType.longAsPoint(encoded); - return geometryRelatesPoint(component2D, point); + return pointRelatesGeometry(point, component2D); } - private boolean geometryRelatesPoint(Component2D component2D, Point point) { - boolean contains = component2D.contains(point.getX(), point.getY()); - return queryRelation == DISJOINT ? contains == false : contains; + private boolean pointRelatesGeometry(Point point, Component2D component2D) { + if (queryRelation == CONTAINS) { + return component2D.withinPoint(point.getX(), point.getY()) == Component2D.WithinRelation.CANDIDATE; + } else { + boolean contains = component2D.contains(point.getX(), point.getY()); + return queryRelation == DISJOINT ? 
contains == false : contains; + } } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java index e088dbf7a70ec..d558e1c21c045 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -50,6 +50,16 @@ static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crs } } + static Component2D[] asLuceneComponent2Ds(SpatialRelatesFunction.SpatialCrsType crsType, Geometry geometry) { + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + var luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, t -> {}); + return LuceneComponent2DUtils.createLatLonComponents(luceneGeometries); + } else { + var luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + return LuceneComponent2DUtils.createXYComponents(luceneGeometries); + } + } + /** * This function is used to convert a spatial constant to a doc-values byte array. * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java new file mode 100644 index 0000000000000..a5ade4cfeb73c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -0,0 +1,252 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_WITHIN. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_WITHIN and QueryRelation.WITHIN. + */ +public class SpatialWithin extends SpatialRelatesFunction implements SurrogateExpression { + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.WITHIN, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Within") + ); + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.WITHIN, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Within") + ); + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the first geometry is within the second geometry.", + note = "The second parameter must also have the same coordinate system as the first. 
" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", + examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") + ) + public SpatialWithin( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + SpatialWithin(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.WITHIN; + } + + @Override + public SpatialWithin withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialWithin(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialWithin replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialWithin(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialWithin::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + /** + * To keep the number of evaluators to a minimum, we swap the arguments to get the CONTAINS relation. + * This also makes other optimizations, like lucene-pushdown, simpler to develop. 
+ */ + @Override + public SpatialRelatesFunction surrogate() { + if (left().foldable() && right().foldable() == false) { + return new SpatialContains(source(), right(), left(), rightDocValues, leftDocValues); + } + return this; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialWithinGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(SpatialWithinGeoSourceAndConstantEvaluator.Factory::new) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + 
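+    // Note: the @Evaluator annotation processor generates one evaluator class per method in
+    // this section, named SpatialWithin<extraName>Evaluator (for example
+    // SpatialWithinGeoSourceAndSourceEvaluator); those generated classes are what the
+    // factories registered in evaluatorMap above construct.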
+ @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 536265b1be3e8..21c17110ad4fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -101,7 +101,10 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -391,7 +394,9 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Pow.class, PlanNamedTypes::writePow, PlanNamedTypes::readPow), of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), - of(ScalarFunction.class, 
SpatialIntersects.class, PlanNamedTypes::writeIntersects, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), + of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), @@ -1494,11 +1499,17 @@ static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); } - static void writeIntersects(PlanStreamOutput out, SpatialIntersects intersects) throws IOException { - List fields = intersects.children(); - assert fields.size() == 2; - out.writeExpression(fields.get(0)); - out.writeExpression(fields.get(1)); + static SpatialContains readContains(PlanStreamInput in) throws IOException { + return new SpatialContains(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static SpatialWithin readWithin(PlanStreamInput in) throws IOException { + return new SpatialWithin(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static void writeSpatialRelatesFunction(PlanStreamOutput out, SpatialRelatesFunction spatialRelatesFunction) throws IOException { + out.writeExpression(spatialRelatesFunction.left()); + out.writeExpression(spatialRelatesFunction.right()); } static Now readNow(PlanStreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 3425306863585..ec3ff07a9867f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -120,7 +121,8 @@ protected static Batch substitutions() { new SubstituteSurrogates(), new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject(), - new SkipQueryOnEmptyMappings() + new SkipQueryOnEmptyMappings(), + new SubstituteSpatialSurrogates() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); } @@ -297,6 +299,25 @@ static String limitToString(String string) { } } + /** + * Currently this works similarly to SurrogateExpression, leaving the logic inside the expressions, + * so each can decide for itself whether or not to change to a surrogate expression. + * But what is actually being done is similar to LiteralsOnTheRight. 
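+     * (As with that rule, the effect is to normalise where the constant argument sits,
+     * here by swapping a foldable left argument over to the right-hand side.)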
We can consider in the future moving
+     * this in either direction, reducing the number of rules, but for now,
+     * it's a separate rule to reduce the risk of unintended interactions with other rules.
+     */
+    static class SubstituteSpatialSurrogates extends OptimizerRules.OptimizerExpressionRule<SpatialRelatesFunction> {
+
+        SubstituteSpatialSurrogates() {
+            super(TransformDirection.UP);
+        }
+
+        @Override
+        protected SpatialRelatesFunction rule(SpatialRelatesFunction function) {
+            return function.surrogate();
+        }
+    }
+
     static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule<Literal> {

         ConvertStringToByteRef() {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
index 95e275a2d0333..29f0e04ef2b94 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
@@ -39,9 +39,14 @@ public class EsqlFeatures implements FeatureSpecification {
     private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn");

     /**
-     * Support for loading {@code geo_point} fields. Added in #102177.
+     * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177.
      */
-    private static final NodeFeature GEO_POINT_SUPPORT = new NodeFeature("esql.geo_point");
+    private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points");
+
+    /**
+     * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691.
+     */
+    private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source");

     /**
      * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}.
@@ -55,10 +60,25 @@ public class EsqlFeatures implements FeatureSpecification {
      */
     private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double");

-    // /**
-    // * Support for loading {@code geo_point} fields.
-    // */
-    // private static final NodeFeature GEO_SHAPE_SUPPORT = new NodeFeature("esql.geo_shape");
+    /**
+     * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269.
+     */
+    private static final NodeFeature SPATIAL_SHAPES = new NodeFeature("esql.spatial_shapes");
+
+    /**
+     * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269.
+     */
+    private static final NodeFeature ST_CENTROID = new NodeFeature("esql.st_centroid");
+
+    /**
+     * Support for the spatial function {@code ST_INTERSECTS}. Done in #104907.
+     */
+    private static final NodeFeature ST_INTERSECTS = new NodeFeature("esql.st_intersects");
+
+    /**
+     * Support for the spatial functions {@code ST_CONTAINS} and {@code ST_WITHIN}. Done in #106503.
+     */
+    private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within");

     /**
      * The introduction of the {@code VALUES} agg.
@@ -77,7 +97,19 @@ public class EsqlFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y, FROM_OPTIONS); + return Set.of( + ASYNC_QUERY, + AGG_VALUES, + MV_SORT, + DISABLE_NULLABLE_OPTS, + ST_X_Y, + FROM_OPTIONS, + SPATIAL_POINTS_FROM_SOURCE, + SPATIAL_SHAPES, + ST_CENTROID, + ST_INTERSECTS, + ST_CONTAINS_WITHIN + ); } @Override @@ -85,10 +117,9 @@ public Map getHistoricalFeatures() { return Map.ofEntries( Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(GEO_POINT_SUPPORT, Version.V_8_12_0), + Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), Map.entry(POW_DOUBLE, Version.V_8_12_0) - // Map.entry(GEO_SHAPE_SUPPORT, Version.V_8_13_0) ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index ca69569546ba3..e67ea0cf5624f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -231,9 +231,9 @@ private static org.apache.lucene.search.Query pointShapeQuery( SearchExecutionContext context ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); - // only the intersects relation is supported for indexed cartesian point types - if (relation != ShapeField.QueryRelation.INTERSECTS) { - throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { + // A point field can never contain a non-point geometry + return new MatchNoDocsQuery(); } final Consumer checker = t -> { if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 56869de1b87ca..b97622f28520c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1223,7 +1223,7 @@ private static void renderFullLayout(String name, boolean hasExamples) throws IO writeToTempDir("layout", rendered, "asciidoc"); } - private static String functionName() { + protected static String functionName() { Class testClass = getTestClass(); if (testClass.isAnnotationPresent(FunctionName.class)) { FunctionName functionNameAnnotation = testClass.getAnnotation(FunctionName.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java new file mode 100644 index 0000000000000..37bfb6eccac5d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java @@ -0,0 +1,46 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_contains") +public class SpatialContainsTests extends SpatialRelatesFunctionTestCase { + public SpatialContainsTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialContainsTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialContains(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java index e36d92fecd81f..83679ca7134e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java @@ -7,38 +7,22 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import joptsimple.internal.Strings; - import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; -import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Locale; -import java.util.Set; import java.util.function.Supplier; 
-import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; - @FunctionName("st_intersects") - -public class SpatialIntersectsTests extends AbstractFunctionTestCase { +public class SpatialIntersectsTests extends SpatialRelatesFunctionTestCase { public SpatialIntersectsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,9 +31,9 @@ public SpatialIntersectsTests(@Name("TestCase") Supplier parameters() { List suppliers = new ArrayList<>(); DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; - addSpatialCombinations(suppliers, geoDataTypes); + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; - addSpatialCombinations(suppliers, cartesianDataTypes); + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) ); @@ -59,155 +43,4 @@ public static Iterable parameters() { protected Expression build(Source source, List args) { return new SpatialIntersects(source, args.get(0), args.get(1)); } - - private static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { - for (DataType leftType : dataTypes) { - TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); - for (DataType rightType : dataTypes) { - if (typeCompatible(leftType, rightType)) { - TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); - suppliers.add( - TestCaseSupplier.testCaseSupplier( - leftDataSupplier, - rightDataSupplier, - SpatialIntersectsTests::spatialEvaluatorString, - DataTypes.BOOLEAN, - (l, r) -> expected(l, leftType, r, rightType) - ) - ); - } - } - } - } - - /** - * Build the expected error message for an invalid type signature. - */ - protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { - List badArgPositions = new ArrayList<>(); - for (int i = 0; i < types.size(); i++) { - if (validPerPosition.get(i).contains(types.get(i)) == false) { - badArgPositions.add(i); - } - } - if (badArgPositions.size() == 0) { - return oneInvalid(1, 0, includeOrdinal, types); - } else if (badArgPositions.size() == 1) { - int badArgPosition = badArgPositions.get(0); - int goodArgPosition = badArgPosition == 0 ? 1 : 0; - if (isSpatial(types.get(goodArgPosition)) == false) { - return oneInvalid(badArgPosition, -1, includeOrdinal, types); - } else { - return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); - } - } else { - return oneInvalid(0, -1, includeOrdinal, types); - } - } - - private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { - String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; - String expectedType = goodArgPosition >= 0 - ? 
compatibleTypes(types.get(goodArgPosition)) - : "geo_point, cartesian_point, geo_shape or cartesian_shape"; - String name = types.get(badArgPosition).typeName(); - return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; - } - - private static String compatibleTypes(DataType spatialDataType) { - return Strings.join(compatibleTypeNames(spatialDataType), " or "); - } - - private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { - return switch (dataType.esType()) { - case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); - case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); - case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); - case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); - default -> throw new IllegalArgumentException("Unsupported datatype for ST_INTERSECTS: " + dataType); - }; - } - - private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { - if (typeCompatible(leftType, rightType) == false) { - return null; - } - // TODO cast objects to right type and check intersection - BytesRef leftWKB = asGeometryWKB(left, leftType); - BytesRef rightWKB = asGeometryWKB(right, rightType); - SpatialRelatesFunction.SpatialRelations spatialIntersects = spatialRelations(left, leftType, right, rightType); - try { - return spatialIntersects.geometryRelatesGeometry(leftWKB, rightWKB); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private static SpatialRelatesFunction.SpatialRelations spatialRelations( - Object left, - DataType leftType, - Object right, - DataType rightType - ) { - if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { - return SpatialIntersects.GEO; - } else if (isSpatial(leftType) || isSpatial(rightType)) { - return SpatialIntersects.CARTESIAN; - } else { - throw new IllegalArgumentException( - "Unsupported left and right types: left[" - + leftType.esType() - + ":" - + left.getClass().getSimpleName() - + "] right[" - + rightType.esType() - + ":" - + right.getClass().getSimpleName() - + "]" - ); - } - } - - private static BytesRef asGeometryWKB(Object object, DataType dataType) { - if (isString(dataType)) { - return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); - } else if (object instanceof BytesRef wkb) { - return wkb; - } else { - throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); - } - } - - private static boolean typeCompatible(DataType leftType, DataType rightType) { - if (isSpatial(leftType) && isSpatial(rightType)) { - // Both must be GEO_* or both must be CARTESIAN_* - return countGeo(leftType, rightType) != 1; - } - return true; - } - - private static DataType pickSpatialType(DataType leftType, DataType rightType) { - if (isSpatial(leftType)) { - return leftType; - } else if (isSpatial(rightType)) { - return rightType; - } else { - throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); - } - } - - private static String spatialEvaluatorString(DataType leftType, DataType rightType) { - String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; - return "SpatialIntersects" + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; - } - - private static int countGeo(DataType... 
types) { - int count = 0; - for (DataType type : types) { - if (isSpatialGeo(type)) { - count++; - } - } - return count; - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java new file mode 100644 index 0000000000000..e905f85141f31 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import joptsimple.internal.Strings; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; + +public abstract class SpatialRelatesFunctionTestCase extends AbstractFunctionTestCase { + + private static String getFunctionClassName() { + Class testClass = getTestClass(); + String testClassName = testClass.getSimpleName(); + return testClassName.replace("Tests", ""); + } + + private static Class getSpatialRelatesFunctionClass() throws ClassNotFoundException { + String functionClassName = getFunctionClassName(); + return Class.forName("org.elasticsearch.xpack.esql.expression.function.scalar.spatial." 
+ functionClassName); + } + + private static SpatialRelatesFunction.SpatialRelations getRelationsField(String name) { + try { + Field field = getSpatialRelatesFunctionClass().getField(name); + Object value = field.get(null); + return (SpatialRelatesFunction.SpatialRelations) value; + } catch (NoSuchFieldException | ClassNotFoundException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + protected static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { + for (DataType leftType : dataTypes) { + TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); + for (DataType rightType : dataTypes) { + if (typeCompatible(leftType, rightType)) { + TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); + suppliers.add( + TestCaseSupplier.testCaseSupplier( + leftDataSupplier, + rightDataSupplier, + SpatialRelatesFunctionTestCase::spatialEvaluatorString, + DataTypes.BOOLEAN, + (l, r) -> expected(l, leftType, r, rightType) + ) + ); + } + } + } + } + + /** + * Build the expected error message for an invalid type signature. + */ + protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { + List badArgPositions = new ArrayList<>(); + for (int i = 0; i < types.size(); i++) { + if (validPerPosition.get(i).contains(types.get(i)) == false) { + badArgPositions.add(i); + } + } + if (badArgPositions.isEmpty()) { + return oneInvalid(1, 0, includeOrdinal, types); + } else if (badArgPositions.size() == 1) { + int badArgPosition = badArgPositions.get(0); + int goodArgPosition = badArgPosition == 0 ? 1 : 0; + if (isSpatial(types.get(goodArgPosition)) == false) { + return oneInvalid(badArgPosition, -1, includeOrdinal, types); + } else { + return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); + } + } else { + return oneInvalid(0, -1, includeOrdinal, types); + } + } + + private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { + String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; + String expectedType = goodArgPosition >= 0 + ? 
compatibleTypes(types.get(goodArgPosition)) + : "geo_point, cartesian_point, geo_shape or cartesian_shape"; + String name = types.get(badArgPosition).typeName(); + return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; + } + + private static String compatibleTypes(DataType spatialDataType) { + return Strings.join(compatibleTypeNames(spatialDataType), " or "); + } + + private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { + return switch (dataType.esType()) { + case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); + case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); + case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); + case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); + default -> throw new IllegalArgumentException("Unsupported datatype for " + functionName() + ": " + dataType); + }; + } + + private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { + if (typeCompatible(leftType, rightType) == false) { + return null; + } + // TODO cast objects to right type and check intersection + BytesRef leftWKB = asGeometryWKB(left, leftType); + BytesRef rightWKB = asGeometryWKB(right, rightType); + SpatialRelatesFunction.SpatialRelations spatialRelations = spatialRelations(left, leftType, right, rightType); + try { + return spatialRelations.geometryRelatesGeometry(leftWKB, rightWKB); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static SpatialRelatesFunction.SpatialRelations spatialRelations( + Object left, + DataType leftType, + Object right, + DataType rightType + ) { + if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { + return getRelationsField("GEO"); + } else if (isSpatial(leftType) || isSpatial(rightType)) { + return getRelationsField("CARTESIAN"); + } else { + throw new IllegalArgumentException( + "Unsupported left and right types: left[" + + leftType.esType() + + ":" + + left.getClass().getSimpleName() + + "] right[" + + rightType.esType() + + ":" + + right.getClass().getSimpleName() + + "]" + ); + } + } + + private static BytesRef asGeometryWKB(Object object, DataType dataType) { + if (isString(dataType)) { + return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); + } else if (object instanceof BytesRef wkb) { + return wkb; + } else { + throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); + } + } + + private static boolean typeCompatible(DataType leftType, DataType rightType) { + if (isSpatial(leftType) && isSpatial(rightType)) { + // Both must be GEO_* or both must be CARTESIAN_* + return countGeo(leftType, rightType) != 1; + } + return true; + } + + private static DataType pickSpatialType(DataType leftType, DataType rightType) { + if (isSpatial(leftType)) { + return leftType; + } else if (isSpatial(rightType)) { + return rightType; + } else { + throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); + } + } + + private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; + return getFunctionClassName() + + crsType + + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + } + + private static int countGeo(DataType... 
types) { + int count = 0; + for (DataType type : types) { + if (isSpatialGeo(type)) { + count++; + } + } + return count; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java new file mode 100644 index 0000000000000..11dbc060b4eb5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_within") +public class SpatialWithinTests extends SpatialRelatesFunctionTestCase { + public SpatialWithinTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialWithinTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialWithin(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 650cd2c81115c..180a8ff16f4eb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -42,7 +42,10 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -120,6 +123,7 @@ import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.PARTIAL; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.expression.Expressions.name; import static org.elasticsearch.xpack.ql.expression.Expressions.names; @@ -2925,6 +2929,174 @@ public void testPushSpatialIntersectsStringToSource() { } } + private record TestSpatialRelation(ShapeRelation relation, TestDataSource index, boolean literalRight, boolean canPushToSource) { + String function() { + return switch (relation) { + case INTERSECTS -> "ST_INTERSECTS"; + case WITHIN -> "ST_WITHIN"; + case CONTAINS -> "ST_CONTAINS"; + default -> throw new IllegalArgumentException("Unsupported relation: " + relation); + }; + } + + Class functionClass() { + return switch (relation) { + case INTERSECTS -> SpatialIntersects.class; + case WITHIN -> literalRight ? SpatialWithin.class : SpatialContains.class; + case CONTAINS -> literalRight ? SpatialContains.class : SpatialWithin.class; + default -> throw new IllegalArgumentException("Unsupported relation: " + relation); + }; + } + + ShapeRelation relationship() { + return switch (relation) { + case WITHIN -> literalRight ? ShapeRelation.WITHIN : ShapeRelation.CONTAINS; + case CONTAINS -> literalRight ? ShapeRelation.CONTAINS : ShapeRelation.WITHIN; + default -> relation; + }; + } + + DataType locationType() { + return index.index.name().endsWith("_web") ? CARTESIAN_POINT : GEO_POINT; + } + + String castFunction() { + return index.index.name().endsWith("_web") ? "TO_CARTESIANSHAPE" : "TO_GEOSHAPE"; + } + + String predicate() { + String field = "location"; + String literal = castFunction() + "(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")"; + return literalRight ? 
function() + "(" + field + ", " + literal + ")" : function() + "(" + literal + ", " + field + ")"; + } + } + + public void testPushDownSpatialRelatesStringToSource() { + TestSpatialRelation[] tests = new TestSpatialRelation[] { + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) }; + for (TestSpatialRelation test : tests) { + var plan = this.physicalPlan("FROM " + test.index.index.name() + " | WHERE " + test.predicate(), test.index); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat(test.predicate(), filter.condition(), instanceOf(test.functionClass())); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + if (test.canPushToSource) { + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name: " + test.predicate(), condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship: " + test.predicate(), condition.relation(), equalTo(test.relationship())); + assertThat("Geometry is Polygon: " + test.predicate(), condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length: " + test.predicate(), polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes: " + test.predicate(), polygon.getNumberOfHoles(), equalTo(0)); + } else { + // Currently CARTESIAN fields do not support lucene push-down for CONTAINS/WITHIN + var limitExec = as(fieldExtract.child(), LimitExec.class); + var filterExec = as(limitExec.child(), FilterExec.class); + var fieldExtractLocation = as(filterExec.child(), FieldExtractExec.class); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().size(), equalTo(1)); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().get(0).name(), equalTo("location")); + var source = source(fieldExtractLocation.child()); + assertThat(test.predicate(), source.query(), equalTo(null)); + } + } + } + + public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() { + TestSpatialRelation[] tests = new 
TestSpatialRelation[] { + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) }; + for (TestSpatialRelation test : tests) { + var centroidExpr = "centroid=ST_CENTROID(location), count=COUNT()"; + var plan = this.physicalPlan( + "FROM " + test.index.index.name() + " | WHERE " + test.predicate() + " | STATS " + centroidExpr, + test.index + ); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), false); + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat(test.predicate(), filter.condition(), instanceOf(test.functionClass())); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), true); + if (test.canPushToSource) { + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract() + .stream() + .allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == test.locationType()) + ); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name: " + test.predicate(), condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship: " + test.predicate(), condition.relation(), equalTo(test.relationship())); + assertThat("Geometry is Polygon: " + test.predicate(), condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon 
shell length: " + test.predicate(), polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes: " + test.predicate(), polygon.getNumberOfHoles(), equalTo(0)); + } else { + // Currently CARTESIAN fields do not support lucene push-down for CONTAINS/WITHIN + var filterExec = as(agg.child(), FilterExec.class); + var fieldExtractLocation = as(filterExec.child(), FieldExtractExec.class); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().size(), equalTo(1)); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().get(0).name(), equalTo("location")); + var source = source(fieldExtractLocation.child()); + assertThat(test.predicate(), source.query(), equalTo(null)); + + } + } + } + /** * Plan: * Plan: From 7f17effb4ff96008218fee529b8fd5f8ab5fcb83 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 2 Apr 2024 13:03:35 +0200 Subject: [PATCH 051/264] Implement rollover for failure stores (#106715) Allows rolling over failure stores through the existing RolloverRequest by specifying `IndicesOptions.FailureStoreOptions` on that request. --- .../DataStreamGetWriteIndexTests.java | 17 +- ...etadataDataStreamRolloverServiceTests.java | 15 +- .../200_rollover_failure_store.yml | 116 ++++++++++ .../rest-api-spec/api/indices.rollover.json | 4 + .../org/elasticsearch/TransportVersions.java | 1 + .../indices/rollover/LazyRolloverAction.java | 3 +- .../rollover/MetadataRolloverService.java | 200 +++++++++++------- .../indices/rollover/RolloverRequest.java | 30 ++- .../rollover/TransportRolloverAction.java | 30 ++- .../cluster/metadata/DataStream.java | 71 ++++++- .../MetadataCreateDataStreamService.java | 31 ++- .../indices/RestRolloverIndexAction.java | 12 ++ ...adataRolloverServiceAutoShardingTests.java | 42 ++-- .../MetadataRolloverServiceTests.java | 137 ++++++++++-- .../rollover/RolloverRequestTests.java | 41 ++++ .../TransportRolloverActionTests.java | 3 +- .../cluster/metadata/DataStreamTests.java | 26 +++ .../MetadataCreateDataStreamServiceTests.java | 49 +++-- .../metadata/DataStreamTestHelper.java | 25 ++- 19 files changed, 685 insertions(+), 168 deletions(-) create mode 100644 modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index b61cbdc837010..111a46bb7098b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -277,7 +277,8 @@ public void setup() throws Exception { createIndexService, indexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); } @@ -318,7 +319,19 @@ private MetadataRolloverService.RolloverResult rolloverOver(ClusterState state, MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); List> metConditions = Collections.singletonList(condition); CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); - return rolloverService.rolloverClusterState(state, name, null, createIndexRequest, metConditions, time, false, false, null, null); + return rolloverService.rolloverClusterState( + state, + name, + null, + createIndexRequest, + metConditions, + time, 
+ false, + false, + null, + null, + false + ); } private Index getWriteIndex(ClusterState state, String name, String timestamp) { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index 2bfbeb8e37aaf..61f0efe89504d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -120,7 +120,8 @@ public void testRolloverClusterStateForDataStream() throws Exception { randomBoolean(), false, indexStats, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -220,7 +221,8 @@ public void testRolloverAndMigrateDataStream() throws Exception { randomBoolean(), false, null, - null + null, + false ); String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); @@ -313,7 +315,8 @@ public void testChangingIndexModeFromTimeSeriesToSomethingElseNoEffectOnExisting randomBoolean(), false, null, - null + null, + false ); String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); @@ -379,7 +382,8 @@ public void testRolloverClusterStateWithBrokenOlderTsdbDataStream() throws Excep randomBoolean(), false, indexStats, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -460,7 +464,8 @@ public void testRolloverClusterStateWithBrokenTsdbDataStream() throws Exception randomBoolean(), false, indexStats, - null + null, + false ) ); assertThat(e.getMessage(), containsString("is overlapping with backing index")); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml new file mode 100644 index 0000000000000..82c757fc4af76 --- /dev/null +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -0,0 +1,116 @@ +--- +setup: + - skip: + version: " - 8.13.99" + reason: "data stream failure store rollover only supported in 8.14+" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [my-template] has index patterns [data-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [data-*] + data_stream: + failure_store: true + template: + mappings: + properties: + '@timestamp': + type: date + count: + type: long + + - do: + indices.create_data_stream: + name: data-stream-for-rollover + +--- +"Roll over a data stream's failure store without conditions": + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + + - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } + - match: { rolled_over: true } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { 
data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. + - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 2 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + +--- +"Roll over a data stream's failure store with conditions": + # index first document and wait for refresh + - do: + index: + index: data-stream-for-rollover + refresh: true + body: + '@timestamp': '2020-12-12' + count: 'invalid value' + + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + body: + conditions: + max_docs: 1 + + - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } + - match: { rolled_over: true } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. + - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 2 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + +--- +"Don't roll over a data stream's failure store when conditions aren't met": + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + body: + conditions: + max_docs: 1 + + - match: { rolled_over: false } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 47a1bee665506..e3c06ab080597 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -63,6 +63,10 @@ "type":"boolean", "default":"false", "description":"If set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. Only allowed on data streams." 
+ }, + "failure_store":{ + "type":"boolean", + "description":"If set to true, the rollover action will be applied on the failure store of the data stream." } }, "body":{ diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d7bc07b3eb2b4..b0649c9429884 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -159,6 +159,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); + public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java index 623186e052eb7..e8d63affcb8bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java @@ -98,7 +98,8 @@ protected void masterOperation( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + false ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index b03353a11793f..75852098170c6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -26,11 +26,13 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService; import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; import org.elasticsearch.cluster.metadata.MetadataIndexAliasesService; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -76,6 +78,7 @@ public class MetadataRolloverService { private final MetadataIndexAliasesService indexAliasesService; private final SystemIndices systemIndices; private final WriteLoadForecaster writeLoadForecaster; + private final ClusterService clusterService; @Inject public MetadataRolloverService( @@ -83,13 +86,15 @@ public MetadataRolloverService( MetadataCreateIndexService createIndexService, MetadataIndexAliasesService indexAliasesService, SystemIndices systemIndices, - WriteLoadForecaster writeLoadForecaster + 
WriteLoadForecaster writeLoadForecaster, + ClusterService clusterService ) { this.threadPool = threadPool; this.createIndexService = createIndexService; this.indexAliasesService = indexAliasesService; this.systemIndices = systemIndices; this.writeLoadForecaster = writeLoadForecaster; + this.clusterService = clusterService; } public record RolloverResult(String rolloverIndexName, String sourceIndexName, ClusterState clusterState) { @@ -116,9 +121,10 @@ public RolloverResult rolloverClusterState( boolean silent, boolean onlyValidate, @Nullable IndexMetadataStats sourceIndexStats, - @Nullable AutoShardingResult autoShardingResult + @Nullable AutoShardingResult autoShardingResult, + boolean isFailureStoreRollover ) throws Exception { - validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest); + validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest, isFailureStoreRollover); final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> rolloverAlias( @@ -141,7 +147,8 @@ public RolloverResult rolloverClusterState( silent, onlyValidate, sourceIndexStats, - autoShardingResult + autoShardingResult, + isFailureStoreRollover ); default -> // the validate method above prevents this case @@ -160,13 +167,19 @@ public static NameResolution resolveRolloverNames( ClusterState currentState, String rolloverTarget, String newIndexName, - CreateIndexRequest createIndexRequest + CreateIndexRequest createIndexRequest, + boolean isFailureStoreRollover ) { - validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest); + validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest, isFailureStoreRollover); final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> resolveAliasRolloverNames(currentState.metadata(), indexAbstraction, newIndexName); - case DATA_STREAM -> resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) indexAbstraction); + case DATA_STREAM -> { + if (isFailureStoreRollover) { + yield resolveDataStreamFailureStoreRolloverNames(currentState.metadata(), (DataStream) indexAbstraction); + } + yield resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) indexAbstraction); + } default -> // the validate method above prevents this case throw new IllegalStateException("unable to roll over type [" + indexAbstraction.getType().getDisplayName() + "]"); @@ -190,6 +203,17 @@ private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, return new NameResolution(originalWriteIndex.getIndex().getName(), null, dataStream.nextWriteIndexAndGeneration(metadata).v1()); } + private static NameResolution resolveDataStreamFailureStoreRolloverNames(Metadata metadata, DataStream dataStream) { + assert dataStream.getFailureStoreWriteIndex() != null : "Unable to roll over failure store with no failure store indices"; + + final IndexMetadata originalWriteIndex = metadata.index(dataStream.getFailureStoreWriteIndex()); + return new NameResolution( + originalWriteIndex.getIndex().getName(), + null, + dataStream.nextFailureStoreWriteIndexAndGeneration(metadata).v1() + ); + } + private RolloverResult rolloverAlias( ClusterState currentState, IndexAbstraction.Alias alias, @@ -252,7 +276,8 @@ private RolloverResult rolloverDataStream( boolean silent, boolean onlyValidate, 
@Nullable IndexMetadataStats sourceIndexStats, - @Nullable AutoShardingResult autoShardingResult + @Nullable AutoShardingResult autoShardingResult, + boolean isFailureStoreRollover ) throws Exception { if (SnapshotsService.snapshottingDataStreams(currentState, Collections.singleton(dataStream.getName())).isEmpty() == false) { @@ -280,8 +305,10 @@ private RolloverResult rolloverDataStream( templateV2 = systemDataStreamDescriptor.getComposableIndexTemplate(); } - final Index originalWriteIndex = dataStream.getWriteIndex(); - final Tuple nextIndexAndGeneration = dataStream.nextWriteIndexAndGeneration(currentState.metadata()); + final Index originalWriteIndex = isFailureStoreRollover ? dataStream.getFailureStoreWriteIndex() : dataStream.getWriteIndex(); + final Tuple nextIndexAndGeneration = isFailureStoreRollover + ? dataStream.nextFailureStoreWriteIndexAndGeneration(currentState.metadata()) + : dataStream.nextWriteIndexAndGeneration(currentState.metadata()); final String newWriteIndexName = nextIndexAndGeneration.v1(); final long newGeneration = nextIndexAndGeneration.v2(); MetadataCreateIndexService.validateIndexName(newWriteIndexName, currentState); // fails if the index already exists @@ -289,72 +316,87 @@ private RolloverResult rolloverDataStream( return new RolloverResult(newWriteIndexName, originalWriteIndex.getName(), currentState); } - DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null - ? dataStream.getAutoShardingEvent() - : switch (autoShardingResult.type()) { - case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { - if (dataStream.getAutoShardingEvent() != null) { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() + ClusterState newState; + if (isFailureStoreRollover) { + newState = MetadataCreateDataStreamService.createFailureStoreIndex( + createIndexService, + "rollover_failure_store", + clusterService.getSettings(), + currentState, + now.toEpochMilli(), + dataStreamName, + templateV2, + newWriteIndexName, + (builder, indexMetadata) -> builder.put(dataStream.rolloverFailureStore(indexMetadata.getIndex(), newGeneration)) + ); + } else { + DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null + ? dataStream.getAutoShardingEvent() + : switch (autoShardingResult.type()) { + case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { + if (dataStream.getAutoShardingEvent() != null) { + logger.info( + "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", + dataStreamName, + dataStream.getAutoShardingEvent() + ); + } + yield dataStream.getAutoShardingEvent(); + } + case INCREASE_SHARDS, DECREASE_SHARDS -> { + logger.info("Auto sharding data stream [{}] to [{}]", dataStreamName, autoShardingResult); + yield new DataStreamAutoShardingEvent( + dataStream.getWriteIndex().getName(), + autoShardingResult.targetNumberOfShards(), + now.toEpochMilli() ); } - yield dataStream.getAutoShardingEvent(); - } - case INCREASE_SHARDS, DECREASE_SHARDS -> { - logger.info("Auto sharding data stream [{}] to [{}]", dataStreamName, autoShardingResult); - yield new DataStreamAutoShardingEvent( - dataStream.getWriteIndex().getName(), - autoShardingResult.targetNumberOfShards(), - now.toEpochMilli() - ); - } - // data sharding might not be available due to the feature not being available/enabled or due to cluster level excludes - // being configured. 
the index template will dictate the number of shards as usual - case NOT_APPLICABLE -> { - logger.debug("auto sharding is not applicable for data stream [{}]", dataStreamName); - yield null; - } - }; - - // configure the number of shards using an auto sharding event (new, or existing) if we have one - if (dataStreamAutoShardingEvent != null) { - Settings settingsWithAutoSharding = Settings.builder() - .put(createIndexRequest.settings()) - .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), dataStreamAutoShardingEvent.targetNumberOfShards()) - .build(); - createIndexRequest.settings(settingsWithAutoSharding); - } + // data sharding might not be available due to the feature not being available/enabled or due to cluster level excludes + // being configured. the index template will dictate the number of shards as usual + case NOT_APPLICABLE -> { + logger.debug("auto sharding is not applicable for data stream [{}]", dataStreamName); + yield null; + } + }; + + // configure the number of shards using an auto sharding event (new, or existing) if we have one + if (dataStreamAutoShardingEvent != null) { + Settings settingsWithAutoSharding = Settings.builder() + .put(createIndexRequest.settings()) + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), dataStreamAutoShardingEvent.targetNumberOfShards()) + .build(); + createIndexRequest.settings(settingsWithAutoSharding); + } - var createIndexClusterStateRequest = prepareDataStreamCreateIndexRequest( - dataStreamName, - newWriteIndexName, - createIndexRequest, - systemDataStreamDescriptor, - now - ); - createIndexClusterStateRequest.setMatchingTemplate(templateV2); - assert createIndexClusterStateRequest.performReroute() == false - : "rerouteCompletionIsNotRequired() assumes reroute is not called by underlying service"; + var createIndexClusterStateRequest = prepareDataStreamCreateIndexRequest( + dataStreamName, + newWriteIndexName, + createIndexRequest, + systemDataStreamDescriptor, + now + ); + createIndexClusterStateRequest.setMatchingTemplate(templateV2); + assert createIndexClusterStateRequest.performReroute() == false + : "rerouteCompletionIsNotRequired() assumes reroute is not called by underlying service"; - ClusterState newState = createIndexService.applyCreateIndexRequest( - currentState, - createIndexClusterStateRequest, - silent, - (builder, indexMetadata) -> { - downgradeBrokenTsdbBackingIndices(dataStream, builder); - builder.put( - dataStream.rollover( - indexMetadata.getIndex(), - newGeneration, - metadata.isTimeSeriesTemplate(templateV2), - dataStreamAutoShardingEvent - ) - ); - }, - rerouteCompletionIsNotRequired() - ); + newState = createIndexService.applyCreateIndexRequest( + currentState, + createIndexClusterStateRequest, + silent, + (builder, indexMetadata) -> { + downgradeBrokenTsdbBackingIndices(dataStream, builder); + builder.put( + dataStream.rollover( + indexMetadata.getIndex(), + newGeneration, + metadata.isTimeSeriesTemplate(templateV2), + dataStreamAutoShardingEvent + ) + ); + }, + rerouteCompletionIsNotRequired() + ); + } RolloverInfo rolloverInfo = new RolloverInfo(dataStreamName, metConditions, threadPool.absoluteTimeInMillis()); @@ -561,7 +603,13 @@ static void checkNoDuplicatedAliasInIndexTemplate( } } - static void validate(Metadata metadata, String rolloverTarget, String newIndexName, CreateIndexRequest request) { + static void validate( + Metadata metadata, + String rolloverTarget, + String newIndexName, + CreateIndexRequest request, + boolean isFailureStoreRollover + ) { final IndexAbstraction 
indexAbstraction = metadata.getIndicesLookup().get(rolloverTarget); if (indexAbstraction == null) { throw new IllegalArgumentException("rollover target [" + rolloverTarget + "] does not exist"); @@ -591,6 +639,12 @@ static void validate(Metadata metadata, String rolloverTarget, String newIndexNa "aliases, mappings, and index settings may not be specified when rolling over a data stream" ); } + var dataStream = (DataStream) indexAbstraction; + if (isFailureStoreRollover && dataStream.isFailureStore() == false) { + throw new IllegalArgumentException( + "unable to roll over failure store because [" + indexAbstraction.getName() + "] does not have the failure store enabled" + ); + } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 06046a066d211..28ef2f644af04 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -101,6 +101,7 @@ public class RolloverRequest extends AcknowledgedRequest implem private RolloverConditions conditions = new RolloverConditions(); // the index name "_na_" is never read back, what matters are settings, mappings and aliases private CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + private IndicesOptions indicesOptions = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); public RolloverRequest(StreamInput in) throws IOException { super(in); @@ -114,6 +115,9 @@ public RolloverRequest(StreamInput in) throws IOException { } else { lazy = false; } + if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { + indicesOptions = IndicesOptions.readIndicesOptions(in); + } } RolloverRequest() {} @@ -138,6 +142,18 @@ public ActionRequestValidationException validate() { ); } + var failureStoreOptions = indicesOptions.failureStoreOptions(); + if (failureStoreOptions.includeRegularIndices() && failureStoreOptions.includeFailureIndices()) { + validationException = addValidationError( + "rollover cannot be applied to both regular and failure indices at the same time", + validationException + ); + } + + if (failureStoreOptions.includeFailureIndices() && lazy) { + validationException = addValidationError("lazily rolling over a failure store is currently not supported", validationException); + } + return validationException; } @@ -152,6 +168,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { out.writeBoolean(lazy); } + if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { + indicesOptions.writeIndicesOptions(out); + } } @Override @@ -161,7 +180,11 @@ public String[] indices() { @Override public IndicesOptions indicesOptions() { - return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + return indicesOptions; + } + + public void setIndicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; } @Override @@ -282,11 +305,12 @@ public boolean equals(Object o) { && Objects.equals(rolloverTarget, that.rolloverTarget) && Objects.equals(newIndexName, that.newIndexName) && Objects.equals(conditions, that.conditions) - && Objects.equals(createIndexRequest, that.createIndexRequest); + && Objects.equals(createIndexRequest, that.createIndexRequest) + && Objects.equals(indicesOptions, 
that.indicesOptions); } @Override public int hashCode() { - return Objects.hash(rolloverTarget, newIndexName, dryRun, conditions, createIndexRequest, lazy); + return Objects.hash(rolloverTarget, newIndexName, dryRun, conditions, createIndexRequest, lazy, indicesOptions); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index bd507ee9054f1..2a92496d5695a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -141,11 +141,14 @@ public TransportRolloverAction( @Override protected ClusterBlockException checkBlock(RolloverRequest request, ClusterState state) { - IndicesOptions indicesOptions = IndicesOptions.fromOptions( - true, - true, - request.indicesOptions().expandWildcardsOpen(), - request.indicesOptions().expandWildcardsClosed() + final var indicesOptions = new IndicesOptions( + IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS, + IndicesOptions.WildcardOptions.builder() + .matchOpen(request.indicesOptions().expandWildcardsOpen()) + .matchClosed(request.indicesOptions().expandWildcardsClosed()) + .build(), + IndicesOptions.GatekeeperOptions.DEFAULT, + request.indicesOptions().failureStoreOptions() ); return state.blocks() @@ -170,7 +173,8 @@ protected void masterOperation( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); @@ -225,9 +229,15 @@ protected void masterOperation( return; } + final var statsIndicesOptions = new IndicesOptions( + IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS, + IndicesOptions.WildcardOptions.builder().matchClosed(true).allowEmptyExpressions(false).build(), + IndicesOptions.GatekeeperOptions.DEFAULT, + rolloverRequest.indicesOptions().failureStoreOptions() + ); IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getRolloverTarget()) .clear() - .indicesOptions(IndicesOptions.fromOptions(true, false, true, true)) + .indicesOptions(statsIndicesOptions) .docs(true) .indexing(true); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); @@ -444,7 +454,8 @@ public ClusterState executeTask( currentState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); // Re-evaluate the conditions, now with our final source index name @@ -494,7 +505,8 @@ public ClusterState executeTask( false, false, sourceIndexStats, - rolloverTask.autoShardingResult() + rolloverTask.autoShardingResult(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); results.add(rolloverResult); logger.trace("rollover result [{}]", rolloverResult); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 776fb9fd87740..f1a508b803d4d 100644 
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -511,12 +512,47 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time } /** - * Performs a dummy rollover on a {@code DataStream} instance and returns the tuple of the next write index name and next generation - * that this {@code DataStream} should roll over to using {@link #rollover(Index, long, boolean, DataStreamAutoShardingEvent)}. + * Performs a rollover on the failure store of a {@code DataStream} instance and returns a new instance containing + * the updated list of failure store indices and incremented generation. * - * @param clusterMetadata Cluster metadata + * @param writeIndex new failure store write index + * @param generation new generation + * @return new {@code DataStream} instance with the rollover operation applied + */ + public DataStream rolloverFailureStore(Index writeIndex, long generation) { + ensureNotReplicated(); + + return unsafeRolloverFailureStore(writeIndex, generation); + } + + /** + * Like {@link #rolloverFailureStore(Index, long)}, but does no validation, use with care only. + */ + public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) { + List failureIndices = new ArrayList<>(this.failureIndices); + failureIndices.add(writeIndex); + return new DataStream( + name, + indices, + generation, + metadata, + hidden, + false, + system, + allowCustomRouting, + indexMode, + lifecycle, + failureStore, + failureIndices, + autoShardingEvent + ); + } + + /** + * Generates the next write index name and generation to be used for rolling over this data stream. * - * @return new {@code DataStream} instance with the dummy rollover operation applied + * @param clusterMetadata Cluster metadata + * @return tuple of the next write index name and next generation. */ public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) { ensureNotReplicated(); @@ -527,11 +563,36 @@ public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) * Like {@link #nextWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. */ public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata) { + return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultBackingIndexName); + } + + /** + * Generates the next write index name and generation to be used for rolling over the failure store of this data stream. + * + * @param clusterMetadata Cluster metadata + * @return tuple of the next failure store write index name and next generation. + */ + public Tuple nextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + ensureNotReplicated(); + return unsafeNextFailureStoreWriteIndexAndGeneration(clusterMetadata); + } + + /** + * Like {@link #nextFailureStoreWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. 
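+     * <p>
+     * A minimal usage sketch (the {@code dataStream} and {@code metadata} here are placeholders), pairing the
+     * validated variant with {@link #rolloverFailureStore(Index, long)} the way the tests in this change do:
+     * <pre>{@code
+     * Tuple<String, Long> next = dataStream.nextFailureStoreWriteIndexAndGeneration(metadata);
+     * DataStream rolled = dataStream.rolloverFailureStore(new Index(next.v1(), UUIDs.randomBase64UUID()), next.v2());
+     * }</pre>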
+ */ + public Tuple unsafeNextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultFailureStoreName); + } + + private Tuple generateNextWriteIndexAndGeneration( + Metadata clusterMetadata, + TriFunction nameGenerator + ) { String newWriteIndexName; long generation = this.generation; long currentTimeMillis = timeProvider.getAsLong(); do { - newWriteIndexName = DataStream.getDefaultBackingIndexName(getName(), ++generation, currentTimeMillis); + newWriteIndexName = nameGenerator.apply(getName(), ++generation, currentTimeMillis); } while (clusterMetadata.hasIndexAbstraction(newWriteIndexName)); return Tuple.tuple(newWriteIndexName, generation); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index fd67a8ac7e230..6c933ba1480df 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -46,6 +47,7 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; @@ -265,6 +267,10 @@ static ClusterState createDataStream( final ComposableIndexTemplate template = isSystem ? systemDataStreamDescriptor.getComposableIndexTemplate() : lookupTemplateForDataStream(dataStreamName, currentState.metadata()); + // The initial backing index and the initial failure store index will have the same initial generation. + // This is not a problem as both have different prefixes (`.ds-` vs `.fs-`) and both will be using the same `generation` field + // when rolling over in the future. + final long initialGeneration = 1; // If we need to create a failure store, do so first. Do not reroute during the creation since we will do // that as part of creating the backing index if required. 
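A minimal sketch of the naming scheme behind the shared generation counter described in the hunk above (the data stream name and timestamp are placeholders; the expected strings follow the ".ds-"/".fs-" formats asserted in DataStreamTests later in this patch):

    long initialGeneration = 1;
    long epochMillis = 1580536800000L; // 2020-02-01T06:00:00Z
    String backing = DataStream.getDefaultBackingIndexName("logs-app", initialGeneration, epochMillis);
    String failure = DataStream.getDefaultFailureStoreName("logs-app", initialGeneration, epochMillis);
    // backing -> ".ds-logs-app-2020.02.01-000001"
    // failure -> ".fs-logs-app-2020.02.01-000001"

Because both index families draw on the same generation counter and differ only in prefix, the initial generation is hoisted into a single constant rather than hard-coding 1 in two places.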
@@ -273,21 +279,23 @@ static ClusterState createDataStream( if (isSystem) { throw new IllegalArgumentException("Failure stores are not supported on system data streams"); } - String failureStoreIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, request.getStartTime()); + String failureStoreIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, initialGeneration, request.getStartTime()); currentState = createFailureStoreIndex( metadataCreateIndexService, + "initialize_data_stream", settings, currentState, - request, + request.getStartTime(), dataStreamName, template, - failureStoreIndexName + failureStoreIndexName, + null ); failureStoreIndex = currentState.metadata().index(failureStoreIndexName); } if (writeIndex == null) { - String firstBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, request.getStartTime()); + String firstBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, initialGeneration, request.getStartTime()); currentState = createBackingIndex( metadataCreateIndexService, currentState, @@ -322,7 +330,7 @@ static ClusterState createDataStream( DataStream newDataStream = new DataStream( dataStreamName, dsBackingIndices, - 1L, + initialGeneration, template.metadata() != null ? Map.copyOf(template.metadata()) : null, hidden, false, @@ -399,14 +407,16 @@ private static ClusterState createBackingIndex( return currentState; } - private static ClusterState createFailureStoreIndex( + public static ClusterState createFailureStoreIndex( MetadataCreateIndexService metadataCreateIndexService, + String cause, Settings settings, ClusterState currentState, - CreateDataStreamClusterStateUpdateRequest request, + long nameResolvedInstant, String dataStreamName, ComposableIndexTemplate template, - String failureStoreIndexName + String failureStoreIndexName, + @Nullable BiConsumer metadataTransformer ) throws Exception { if (DataStream.isFailureStoreEnabled() == false) { return currentState; @@ -423,11 +433,11 @@ private static ClusterState createFailureStoreIndex( } CreateIndexClusterStateUpdateRequest createIndexRequest = new CreateIndexClusterStateUpdateRequest( - "initialize_data_stream", + cause, failureStoreIndexName, failureStoreIndexName ).dataStreamName(dataStreamName) - .nameResolvedInstant(request.getStartTime()) + .nameResolvedInstant(nameResolvedInstant) .performReroute(false) .setMatchingTemplate(template) .settings(indexSettings); @@ -437,6 +447,7 @@ private static ClusterState createFailureStoreIndex( currentState, createIndexRequest, false, + metadataTransformer, AllocationActionListener.rerouteCompletionIsNotRequired() ); } catch (ResourceAlreadyExistsException e) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index b22c79230ef3c..a0796c0f95639 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -10,7 +10,9 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.logging.DeprecationLogger; import 
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; @@ -51,6 +53,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC rolloverIndexRequest.lazy(request.paramAsBoolean("lazy", false)); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); + if (DataStream.isFailureStoreEnabled()) { + boolean failureStore = request.paramAsBoolean("failure_store", false); + if (failureStore) { + rolloverIndexRequest.setIndicesOptions( + IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); + } + } rolloverIndexRequest.getCreateIndexRequest() .waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java index fd21e0c27099e..906b2434f7d39 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java @@ -107,7 +107,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33) + new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 5); } @@ -126,7 +127,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -152,7 +154,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -182,7 +185,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -198,7 +202,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream 
due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -215,7 +220,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33) + new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -231,7 +237,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null) + new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -303,7 +310,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33) + new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 5); } @@ -322,7 +330,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -348,7 +357,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -371,7 +381,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -387,7 +398,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -404,7 +416,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33) + new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -420,7 +433,8 @@ public void 
testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null) + new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null), + false ); // if the auto sharding is not applicable we just use whatever's in the index template (1 shard in this case) assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 1); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 23905c9445d18..d386eb40aea43 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions.FailureStoreOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasAction; @@ -28,6 +29,7 @@ import org.elasticsearch.cluster.metadata.MetadataIndexAliasesService; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -199,26 +201,26 @@ public void testAliasValidation() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, aliasWithNoWriteIndex, randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, aliasWithNoWriteIndex, randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("rollover target [" + aliasWithNoWriteIndex + "] does not point to a write index")); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomFrom(index1, index2), randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, randomFrom(index1, index2), randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("rollover target is a [concrete index] but one of [alias,data_stream] was expected")); final String aliasName = randomAlphaOfLength(5); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, aliasName, randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, aliasName, randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("rollover target [" + aliasName + "] does not exist")); - MetadataRolloverService.validate(metadata, aliasWithWriteIndex, randomAlphaOfLength(5), req); + MetadataRolloverService.validate(metadata, aliasWithWriteIndex, randomAlphaOfLength(5), req, false); } public void testDataStreamValidation() throws IOException { Metadata.Builder md = Metadata.builder(); - DataStream randomDataStream = DataStreamTestHelper.randomInstance(); + DataStream randomDataStream = 
DataStreamTestHelper.randomInstance(false); for (Index index : randomDataStream.getIndices()) { md.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index)); } @@ -226,18 +228,18 @@ public void testDataStreamValidation() throws IOException { Metadata metadata = md.build(); CreateIndexRequest req = new CreateIndexRequest(); - MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req); + MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req, false); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("new index name may not be specified when rolling over a data stream")); CreateIndexRequest aliasReq = new CreateIndexRequest().alias(new Alias("no_aliases_permitted")); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, aliasReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, aliasReq, false) ); assertThat( exception.getMessage(), @@ -248,7 +250,7 @@ public void testDataStreamValidation() throws IOException { CreateIndexRequest mappingReq = new CreateIndexRequest().mapping(mapping); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, mappingReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, mappingReq, false) ); assertThat( exception.getMessage(), @@ -258,12 +260,23 @@ public void testDataStreamValidation() throws IOException { CreateIndexRequest settingReq = new CreateIndexRequest().settings(Settings.builder().put("foo", "bar")); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, settingReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, settingReq, false) ); assertThat( exception.getMessage(), equalTo("aliases, mappings, and index settings may not be specified when rolling over a data stream") ); + + exception = expectThrows( + IllegalArgumentException.class, + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req, true) + ); + assertThat( + exception.getMessage(), + equalTo( + "unable to roll over failure store because [" + randomDataStream.getName() + "] does not have the failure store enabled" + ) + ); } public void testGenerateRolloverIndexName() { @@ -548,7 +561,8 @@ public void testRolloverClusterState() throws Exception { randomBoolean(), false, null, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -606,6 +620,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { List> metConditions = Collections.singletonList(condition); CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + String sourceIndexName = dataStream.getWriteIndex().getName(); long before = testThreadPool.absoluteTimeInMillis(); MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( clusterState, @@ -617,11 +632,11 @@ public void testRolloverClusterStateForDataStream() throws Exception { randomBoolean(), false, null, - null + null, + false ); long 
after = testThreadPool.absoluteTimeInMillis(); - String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); assertEquals(newIndexName, rolloverResult.rolloverIndexName()); @@ -646,19 +661,101 @@ public void testRolloverClusterStateForDataStream() throws Exception { } } + public void testRolloverClusterStateForDataStreamFailureStore() throws Exception { + final DataStream dataStream = DataStreamTestHelper.randomInstance(true) + // ensure no replicate data stream + .promoteDataStream(); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStream.getName() + "*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(), + xContentRegistry() + ); + + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + long before = testThreadPool.absoluteTimeInMillis(); + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + Instant.now(), + randomBoolean(), + false, + null, + null, + true + ); + long after = testThreadPool.absoluteTimeInMillis(); + + var epochMillis = System.currentTimeMillis(); + String sourceIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration(), epochMillis); + String newIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration() + 1, epochMillis); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size()); + IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName); + + var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName()); + assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM)); + assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size())); + assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1)); + assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); + assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex())); + assertThat(ds.getFailureStoreWriteIndex(), equalTo(rolloverIndexMetadata.getIndex())); + + RolloverInfo info = 
rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName()); + assertThat(info.getTime(), lessThanOrEqualTo(after)); + assertThat(info.getTime(), greaterThanOrEqualTo(before)); + assertThat(info.getMetConditions(), hasSize(1)); + assertThat(info.getMetConditions().get(0).value(), equalTo(condition.value())); + } finally { + testThreadPool.shutdown(); + } + } + public void testValidation() throws Exception { final String rolloverTarget; final String sourceIndexName; final String defaultRolloverIndexName; final boolean useDataStream = randomBoolean(); final Metadata.Builder builder = Metadata.builder(); + var failureStoreOptions = FailureStoreOptions.DEFAULT; if (useDataStream) { DataStream dataStream = DataStreamTestHelper.randomInstance() // ensure no replicate data stream .promoteDataStream(); rolloverTarget = dataStream.getName(); - sourceIndexName = dataStream.getIndices().get(dataStream.getIndices().size() - 1).getName(); - defaultRolloverIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + if (dataStream.isFailureStore() && randomBoolean()) { + failureStoreOptions = new FailureStoreOptions(false, true); + sourceIndexName = dataStream.getFailureStoreWriteIndex().getName(); + defaultRolloverIndexName = DataStream.getDefaultFailureStoreName( + dataStream.getName(), + dataStream.getGeneration() + 1, + System.currentTimeMillis() + ); + } else { + sourceIndexName = dataStream.getIndices().get(dataStream.getIndices().size() - 1).getName(); + defaultRolloverIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + } ComposableIndexTemplate template = ComposableIndexTemplate.builder() .indexPatterns(List.of(dataStream.getName() + "*")) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) @@ -684,12 +781,14 @@ public void testValidation() throws Exception { MetadataCreateIndexService createIndexService = mock(MetadataCreateIndexService.class); MetadataIndexAliasesService metadataIndexAliasesService = mock(MetadataIndexAliasesService.class); + ClusterService clusterService = mock(ClusterService.class); MetadataRolloverService rolloverService = new MetadataRolloverService( null, createIndexService, metadataIndexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); String newIndexName = useDataStream == false && randomBoolean() ? "logs-index-9" : null; @@ -704,7 +803,8 @@ public void testValidation() throws Exception { randomBoolean(), true, null, - null + null, + failureStoreOptions.includeFailureIndices() ); newIndexName = newIndexName == null ? 
defaultRolloverIndexName : newIndexName; @@ -746,7 +846,8 @@ public void testRolloverClusterStateForDataStreamNoTemplate() throws Exception { false, randomBoolean(), null, - null + null, + false ) ); assertThat(e.getMessage(), equalTo("no matching index template found for data stream [" + dataStream.getName() + "]")); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index b34045b50654c..b6c0b5047ab77 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -175,6 +176,12 @@ public void testSerialize() throws Exception { .build() ); originalRequest.lazy(randomBoolean()); + originalRequest.setIndicesOptions( + IndicesOptions.builder(originalRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(randomBoolean(), randomBoolean())) + .build() + ); + try (BytesStreamOutput out = new BytesStreamOutput()) { originalRequest.writeTo(out); BytesReference bytes = out.bytes(); @@ -183,6 +190,10 @@ public void testSerialize() throws Exception { assertThat(cloneRequest.getNewIndexName(), equalTo(originalRequest.getNewIndexName())); assertThat(cloneRequest.getRolloverTarget(), equalTo(originalRequest.getRolloverTarget())); assertThat(cloneRequest.isLazy(), equalTo(originalRequest.isLazy())); + assertThat( + cloneRequest.indicesOptions().failureStoreOptions(), + equalTo(originalRequest.indicesOptions().failureStoreOptions()) + ); for (Map.Entry> entry : cloneRequest.getConditions().getConditions().entrySet()) { Condition condition = originalRequest.getConditions().getConditions().get(entry.getKey()); // here we compare the string representation as there is some information loss when serializing @@ -247,6 +258,36 @@ public void testValidation() { ActionRequestValidationException validationException = rolloverRequest.validate(); assertNull(validationException); } + + { + RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(true, true)) + .build() + ); + ActionRequestValidationException validationException = rolloverRequest.validate(); + assertNotNull(validationException); + assertEquals(1, validationException.validationErrors().size()); + assertEquals( + "rollover cannot be applied to both regular and failure indices at the same time", + validationException.validationErrors().get(0) + ); + } + + { + RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); + rolloverRequest.lazy(true); + ActionRequestValidationException validationException = rolloverRequest.validate(); + 
assertNotNull(validationException); + assertEquals(1, validationException.validationErrors().size()); + assertEquals("lazily rolling over a failure store is currently not supported", validationException.validationErrors().get(0)); + } } public void testParsingWithType() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index db156f983220e..9faa6c4ba2d3f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -110,7 +110,8 @@ public class TransportRolloverActionTests extends ESTestCase { mockCreateIndexService, mdIndexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + mockClusterService ); final DataStreamAutoShardingService dataStreamAutoShardingService = new DataStreamAutoShardingService( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 56d36d8fb18b0..3187a3e391691 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -253,6 +253,20 @@ public void testRolloverDowngradeToRegularDataStream() { assertThat(rolledDs.getIndexMode(), nullValue()); } + public void testRolloverFailureStore() { + DataStream ds = DataStreamTestHelper.randomInstance(true).promoteDataStream(); + Tuple newCoordinates = ds.nextFailureStoreWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + final DataStream rolledDs = ds.rolloverFailureStore(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); + assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertTrue(rolledDs.getFailureIndices().containsAll(ds.getFailureIndices())); + assertTrue(rolledDs.getFailureIndices().contains(rolledDs.getFailureStoreWriteIndex())); + } + public void testRemoveBackingIndex() { int numBackingIndices = randomIntBetween(2, 32); int indexToRemove = randomIntBetween(1, numBackingIndices - 1); @@ -508,6 +522,18 @@ public void testDefaultBackingIndexName() { assertThat(defaultBackingIndexName, equalTo(expectedBackingIndexName)); } + public void testDefaultFailureStoreName() { + // this test does little more than flag that changing the default naming convention for failure store indices + // will also require changing a lot of hard-coded values in REST tests and docs + long failureStoreIndexNum = randomLongBetween(1, 1000001); + String dataStreamName = randomAlphaOfLength(6); + long epochMillis = randomLongBetween(1580536800000L, 1583042400000L); + String dateString = DataStream.DATE_FORMATTER.formatMillis(epochMillis); + String defaultFailureStoreName = DataStream.getDefaultFailureStoreName(dataStreamName, failureStoreIndexNum, epochMillis); + String expectedFailureStoreName = 
Strings.format(".fs-%s-%s-%06d", dataStreamName, dateString, failureStoreIndexNum); + assertThat(defaultFailureStoreName, equalTo(expectedFailureStoreName)); + } + public void testReplaceBackingIndex() { int numBackingIndices = randomIntBetween(2, 32); int indexToReplace = randomIntBetween(1, numBackingIndices - 1) - 1; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index ea79bc8f13765..3df777c1f0eef 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.SystemIndices.Feature; import org.elasticsearch.test.ESTestCase; +import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.HashMap; @@ -465,29 +466,33 @@ public static ClusterState createDataStream(final String dataStreamName) throws private static MetadataCreateIndexService getMetadataCreateIndexService() throws Exception { MetadataCreateIndexService s = mock(MetadataCreateIndexService.class); when(s.getSystemIndices()).thenReturn(getSystemIndices()); - when(s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any())) - .thenAnswer(mockInvocation -> { - ClusterState currentState = (ClusterState) mockInvocation.getArguments()[0]; - CreateIndexClusterStateUpdateRequest request = (CreateIndexClusterStateUpdateRequest) mockInvocation.getArguments()[1]; + Answer objectAnswer = mockInvocation -> { + ClusterState currentState = (ClusterState) mockInvocation.getArguments()[0]; + CreateIndexClusterStateUpdateRequest request = (CreateIndexClusterStateUpdateRequest) mockInvocation.getArguments()[1]; - Metadata.Builder b = Metadata.builder(currentState.metadata()) - .put( - IndexMetadata.builder(request.index()) - .settings( - Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(request.settings()) - .build() - ) - .putMapping(generateMapping("@timestamp")) - .system(getSystemIndices().isSystemName(request.index())) - .numberOfShards(1) - .numberOfReplicas(1) - .build(), - false - ); - return ClusterState.builder(currentState).metadata(b.build()).build(); - }); + Metadata.Builder b = Metadata.builder(currentState.metadata()) + .put( + IndexMetadata.builder(request.index()) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(request.settings()) + .build() + ) + .putMapping(generateMapping("@timestamp")) + .system(getSystemIndices().isSystemName(request.index())) + .numberOfShards(1) + .numberOfReplicas(1) + .build(), + false + ); + return ClusterState.builder(currentState).metadata(b.build()).build(); + }; + when(s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any())) + .thenAnswer(objectAnswer); + when( + s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any(), any()) + ).thenAnswer(objectAnswer); return s; } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 
4cc019a300e8b..2980b8a48636a 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -315,16 +315,24 @@ public static DataStream randomInstance() { return randomInstance(System::currentTimeMillis); } + public static DataStream randomInstance(boolean failureStore) { + return randomInstance(System::currentTimeMillis, failureStore); + } + public static DataStream randomInstance(String name) { - return randomInstance(name, System::currentTimeMillis); + return randomInstance(name, System::currentTimeMillis, randomBoolean()); } public static DataStream randomInstance(LongSupplier timeProvider) { + return randomInstance(timeProvider, randomBoolean()); + } + + public static DataStream randomInstance(LongSupplier timeProvider, boolean failureStore) { String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); - return randomInstance(dataStreamName, timeProvider); + return randomInstance(dataStreamName, timeProvider, failureStore); } - public static DataStream randomInstance(String dataStreamName, LongSupplier timeProvider) { + public static DataStream randomInstance(String dataStreamName, LongSupplier timeProvider, boolean failureStore) { List indices = randomIndexInstances(); long generation = indices.size() + ESTestCase.randomLongBetween(1, 128); indices.add(new Index(getDefaultBackingIndexName(dataStreamName, generation), UUIDs.randomBase64UUID(LuceneTestCase.random()))); @@ -333,9 +341,15 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time metadata = Map.of("key", "value"); } List failureIndices = List.of(); - boolean failureStore = randomBoolean(); + generation = generation + ESTestCase.randomLongBetween(1, 128); if (failureStore) { failureIndices = randomNonEmptyIndexInstances(); + failureIndices.add( + new Index( + getDefaultFailureStoreName(dataStreamName, generation, System.currentTimeMillis()), + UUIDs.randomBase64UUID(LuceneTestCase.random()) + ) + ); } return new DataStream( @@ -679,7 +693,8 @@ public static MetadataRolloverService getMetadataRolloverService( createIndexService, indexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); } From e56dcee07832c75eca32e5d044b5a24f5ab0161f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Apr 2024 13:34:46 +0200 Subject: [PATCH 052/264] [Inference API] Add completion task type docs (#106876) --- .../inference/post-inference.asciidoc | 37 ++++++++++++++++++- .../inference/put-inference.asciidoc | 34 ++++++++++++++++- 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index e4cbd26904271..08a58d7789e33 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -33,8 +33,8 @@ own model, use the <>. ==== {api-description-title} The perform {infer} API enables you to use {ml} models to perform specific tasks -on data that you provide as an input. The API returns a response with the -resutls of the tasks. The {infer} model you use can perform one specific task +on data that you provide as an input. The API returns a response with the +results of the tasks. The {infer} model you use can perform one specific task that has been defined when the model was created with the <>. 
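As a minimal sketch of driving this API from Java rather than the console, the following uses only the JDK's built-in HTTP client. The host, API key, and the `openai_chat_completions` endpoint id are assumptions (the endpoint id is borrowed from the completion example further below), and the single-string `input` reflects the note added below for `completion` endpoints.

[source,java]
------------------------------------------------------------
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class InferenceCompletionSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical cluster address and credentials: adjust for your deployment.
        String url = "http://localhost:9200/_inference/completion/openai_chat_completions";
        String apiKey = "<elasticsearch-api-key>";
        // Completion endpoints currently accept a single string as input.
        String body = "{\"input\": \"What is Elastic?\"}";

        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
            .header("Content-Type", "application/json")
            .header("Authorization", "ApiKey " + apiKey)
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        // Expect a JSON body of the form {"completion": [{"result": "..."}]}.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
------------------------------------------------------------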
@@ -60,6 +60,10 @@ The type of {infer} task that the model performs. (Required, array of strings) The text on which you want to perform the {infer} task. `input` can be a single string or an array. +[NOTE] +==== +Inference endpoints for the `completion` task type currently only support a single string as input. +==== [discrete] @@ -108,3 +112,32 @@ The API returns the following response: } ------------------------------------------------------------ // NOTCONSOLE + + +The next example performs a completion on the example question. + + +[source,console] +------------------------------------------------------------ +POST _inference/completion/openai_chat_completions +{ + "input": "What is Elastic?" +} +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + + +[source,console-result] +------------------------------------------------------------ +{ + "completion": [ + { + "result": "Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management." + } + ] +} +------------------------------------------------------------ +// NOTCONSOLE diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index c0b9d508e13c3..110ec9d6fa98c 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -58,7 +58,8 @@ The unique identifier of the {infer} endpoint. (Required, string) The type of the {infer} task that the model will perform. Available task types: * `sparse_embedding`, -* `text_embedding`. +* `text_embedding`, +* `completion` [discrete] @@ -101,7 +102,7 @@ the same name and the updated API key. (Optional, string) Specifies the types of embeddings you want to get back. Defaults to `float`. Valid values are: - * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). + * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). * `float`: use it for the default float embeddings. * `int8`: use it for signed int8 embeddings. @@ -232,6 +233,18 @@ maximum token length. Defaults to `END`. Valid values are: the input is discarded. * `END`: when the input exceeds the maximum input token length the end of the input is discarded. + +`user`::: +(optional, string) +For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. +===== ++ +.`task_settings` for the `completion` task type +[%collapsible%closed] +===== +`user`::: +(optional, string) +For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. ===== @@ -402,3 +415,20 @@ PUT _inference/text_embedding/openai_embeddings } ------------------------------------------------------------ // TEST[skip:TBD] + +The next example shows how to create an {infer} endpoint called +`openai_completion` to perform a `completion` task type. 
+ +[source,console] +------------------------------------------------------------ +PUT _inference/completion/openai_completion +{ + "service": "openai", + "service_settings": { + "api_key": "", + "model_id": "gpt-3.5-turbo" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + From 480ee1c7b574d4606ca4e4fd17ae7e2ffe43feb2 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 2 Apr 2024 14:38:59 +0200 Subject: [PATCH 053/264] Convert FetchFieldsContext to a record (#106991) Simple refactoring to save a few lines of code. --- .../search/fetch/subphase/FetchFieldsContext.java | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java index c7fde5f77f067..5f5db5e533648 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java @@ -12,14 +12,4 @@ /** * The context needed to retrieve fields. */ -public class FetchFieldsContext { - private final List<FieldAndFormat> fields; - - public FetchFieldsContext(List<FieldAndFormat> fields) { - this.fields = fields; - } - - public List<FieldAndFormat> fields() { - return fields; - } -} +public record FetchFieldsContext(List<FieldAndFormat> fields) {} From ee6ff33c5006b5b60759763e2b132b55487ca424 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Apr 2024 08:46:41 -0400 Subject: [PATCH 054/264] ESQL: Remove unused test class (#106996) We don't use it any more. --- .../AbstractRationalUnaryPredicateTests.java | 38 ------------------- 1 file changed, 38 deletions(-) delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractRationalUnaryPredicateTests.java diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractRationalUnaryPredicateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractRationalUnaryPredicateTests.java deleted file mode 100644 index e23956dd36715..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractRationalUnaryPredicateTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; - -import java.util.List; - -public abstract class AbstractRationalUnaryPredicateTests extends AbstractScalarFunctionTestCase { - protected abstract RationalUnaryPredicate build(Source source, Expression value); - - protected abstract Matcher resultMatcher(double d); - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.BOOLEAN; - } - - @Override - protected final List argSpec() { - return List.of(required(rationals())); - } - - @Override - protected Expression build(Source source, List args) { - return build(source, args.get(0)); - } -} From f2e594fae7312a6665625537f750b2006f336f34 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Apr 2024 15:18:54 +0200 Subject: [PATCH 055/264] Remove unused XContent parsing logic (#106950) Another round of mostly automatically cleaning up unused xcontent parser methods and parser instances that become unused as a result. --- .../mapping/get/GetFieldMappingsResponse.java | 7 --- .../AbstractAllocateAllocationCommand.java | 3 -- ...AllocateEmptyPrimaryAllocationCommand.java | 3 +- .../AllocateReplicaAllocationCommand.java | 3 +- ...AllocateStalePrimaryAllocationCommand.java | 3 +- .../java/org/elasticsearch/tasks/TaskId.java | 12 ----- .../protocol/xpack/XPackInfoResponse.java | 14 ------ .../protocol/xpack/graph/Connection.java | 50 ------------------- .../xpack/core/ilm/CheckShrinkReadyStep.java | 11 ---- .../InferTrainedModelDeploymentAction.java | 23 --------- .../core/ml/action/StartDatafeedAction.java | 4 -- .../core/ml/action/StopDatafeedAction.java | 4 -- .../evaluation/classification/Accuracy.java | 18 ------- .../evaluation/classification/Precision.java | 18 ------- .../evaluation/classification/Recall.java | 18 ------- .../metadata/FeatureImportanceBaseline.java | 4 -- .../metadata/TotalFeatureImportance.java | 4 -- .../xpack/core/watcher/trigger/Trigger.java | 10 ---- .../connector/ConnectorScheduling.java | 4 -- .../UpdateConnectorSyncJobErrorAction.java | 3 -- ...eConnectorSyncJobIngestionStatsAction.java | 4 -- .../rules/action/GetQueryRulesetAction.java | 8 --- .../action/GetSearchApplicationAction.java | 14 ------ .../xpack/watcher/watch/WatchParser.java | 12 ----- 24 files changed, 3 insertions(+), 251 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 0b37486a6e403..d1da8f4abc9d6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; @@ -124,12 +123,6 @@ public record 
FieldMappingMetadata(String fullName, BytesReference source) imple private static final ParseField FULL_NAME = new ParseField("full_name"); private static final ParseField MAPPING = new ParseField("mapping"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_mapping_meta_data", - true, - a -> new FieldMappingMetadata((String) a[0], (BytesReference) a[1]) - ); - /** * Returns the mappings as a map. Note that the returned map has a single key which is always the field's {@link Mapper#name}. */ diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java index 844d9fa1addfc..fd9a89285243c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -24,7 +24,6 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -66,8 +65,6 @@ public void setNode(String node) { this.node = node; } - public abstract Builder parse(XContentParser parser) throws IOException; - public abstract T build(); protected void validate() { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index 6d6a1f5f95528..e2fdec43d8e12 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -74,8 +74,7 @@ public static AllocateEmptyPrimaryAllocationCommand fromXContent(XContentParser public static class Builder extends BasePrimaryAllocationCommand.Builder { - @Override - public Builder parse(XContentParser parser) throws IOException { + private Builder parse(XContentParser parser) throws IOException { return EMPTY_PRIMARY_PARSER.parse(parser, this, null); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java index be8d2447b0fd7..0590b01d111e4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java @@ -64,8 +64,7 @@ public static AllocateReplicaAllocationCommand fromXContent(XContentParser parse protected static class Builder extends AbstractAllocateAllocationCommand.Builder { - @Override - public Builder parse(XContentParser parser) throws IOException { + private Builder parse(XContentParser parser) throws IOException { return REPLICA_PARSER.parse(parser, this, null); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java index ebfe10d1cf16a..19b045051d5cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -71,8 +71,7 @@ public static AllocateStalePrimaryAllocationCommand fromXContent(XContentParser public static class Builder extends BasePrimaryAllocationCommand.Builder { - @Override - public Builder parse(XContentParser parser) throws IOException { + private Builder parse(XContentParser parser) throws IOException { return STALE_PRIMARY_PARSER.parse(parser, this, null); } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskId.java b/server/src/main/java/org/elasticsearch/tasks/TaskId.java index f9420574058f0..8fa139ac58455 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskId.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskId.java @@ -8,13 +8,10 @@ package org.elasticsearch.tasks; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -88,15 +85,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(id); } - public static ContextParser parser() { - return (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return new TaskId(p.text()); - } - throw new ElasticsearchParseException("Expected a string but found [{}] instead", p.currentToken()); - }; - } - public String getNodeId() { return nodeId; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 0ea0cba8198c5..34126064997d6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -17,8 +17,6 @@ import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.License; import org.elasticsearch.protocol.xpack.license.LicenseStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -32,8 +30,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public class XPackInfoResponse extends ActionResponse implements ToXContentObject { /** * Value of the license's expiration time if it should never expire. 
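The hunk that follows deletes an unused `ConstructingObjectParser` from `XPackInfoResponse`. For readers unfamiliar with the idiom being removed throughout this commit, here is a minimal sketch of how such a parser binds named fields to constructor-argument slots; the `BuildInfo` record is a stand-in for the real nested class, and only the `hash`/`date` fields mirror the deleted code.

[source,java]
------------------------------------------------------------
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

// Stand-in for the nested BuildInfo class; illustrative only.
record BuildInfo(String hash, String timestamp) {

    // Lenient parser: the boolean `true` means unknown fields are ignored,
    // and each declare* call fills the next slot of the constructor-args array.
    static final ConstructingObjectParser<BuildInfo, Void> PARSER = new ConstructingObjectParser<>(
        "build_info",
        true,
        a -> new BuildInfo((String) a[0], (String) a[1])
    );

    static {
        PARSER.declareString(constructorArg(), new ParseField("hash"));
        PARSER.declareString(constructorArg(), new ParseField("date"));
    }

    static BuildInfo fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }
}
------------------------------------------------------------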
@@ -276,16 +272,6 @@ public int hashCode() { return Objects.hash(hash, timestamp); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "build_info", - true, - (a, v) -> new BuildInfo((String) a[0], (String) a[1]) - ); - static { - PARSER.declareString(constructorArg(), new ParseField("hash")); - PARSER.declareString(constructorArg(), new ParseField("date")); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.startObject().field("hash", hash).field("date", timestamp).endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java index 9303f65c35f73..170708dccbfb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java @@ -9,19 +9,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent.Params; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * A Connection links exactly two {@link Vertex} objects. The basis of a * connection is one or more documents have been found that contain @@ -112,51 +107,6 @@ void toXContent(XContentBuilder builder, Params params, Map ver builder.field(DOC_COUNT.getPreferredName(), docCount); } - // When deserializing from XContent we need to wait for all vertices to be loaded before - // Connection objects can be created that reference them. This class provides the interim - // state for connections. 
- static class UnresolvedConnection { - int fromIndex; - int toIndex; - double weight; - long docCount; - - UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) { - super(); - this.fromIndex = fromIndex; - this.toIndex = toIndex; - this.weight = weight; - this.docCount = docCount; - } - - public Connection resolve(List vertices) { - return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "ConnectionParser", - true, - args -> { - int source = (Integer) args[0]; - int target = (Integer) args[1]; - double weight = (Double) args[2]; - long docCount = (Long) args[3]; - return new UnresolvedConnection(source, target, weight, docCount); - } - ); - - static { - PARSER.declareInt(constructorArg(), SOURCE); - PARSER.declareInt(constructorArg(), TARGET); - PARSER.declareDouble(constructorArg(), WEIGHT); - PARSER.declareLong(constructorArg(), DOC_COUNT); - } - - static UnresolvedConnection fromXContent(XContentParser parser) throws IOException { - return PARSER.apply(parser, null); - } - } - /** * An identifier (implements hashcode and equals) that represents a * unique key for a {@link Connection} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java index b43b298921027..b731dc0b6c2c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStep.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Strings; import org.elasticsearch.index.Index; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -151,16 +150,6 @@ public static final class Info implements ToXContentObject { static final ParseField EXPECTED_SHARDS = new ParseField("expected_shards"); static final ParseField SHARDS_TO_ALLOCATE = new ParseField("shards_left_to_allocate"); static final ParseField MESSAGE = new ParseField("message"); - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "check_shrink_ready_step_info", - a -> new CheckShrinkReadyStep.Info((String) a[0], (long) a[1], (long) a[2]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), EXPECTED_SHARDS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), SHARDS_TO_ALLOCATE); - PARSER.declareString((i, s) -> {}, MESSAGE); - } public Info(String nodeId, long expectedShards, long numberShardsLeftToAllocate) { this.nodeId = nodeId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index 4209374740acc..2384a9f03e1e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -21,11 +21,9 @@ import 
org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; @@ -70,30 +68,9 @@ public static class Request extends BaseTasksRequest { public static final ParseField DOCS = new ParseField("docs"); public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField INFERENCE_CONFIG = new ParseField("inference_config"); public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(10); - static final ObjectParser PARSER = new ObjectParser<>(NAME, Request.Builder::new); - static { - PARSER.declareString(Request.Builder::setId, InferModelAction.Request.DEPLOYMENT_ID); - PARSER.declareObjectArray(Request.Builder::setDocs, (p, c) -> p.mapOrdered(), DOCS); - PARSER.declareString(Request.Builder::setInferenceTimeout, TIMEOUT); - PARSER.declareNamedObject( - Request.Builder::setUpdate, - ((p, c, name) -> p.namedObject(InferenceConfigUpdate.class, name, c)), - INFERENCE_CONFIG - ); - } - - public static Request.Builder parseRequest(String id, XContentParser parser) { - Request.Builder builder = PARSER.apply(parser, null); - if (id != null) { - builder.setId(id); - } - return builder; - } - private String id; private final List> docs; private final InferenceConfigUpdate update; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 0ac1203f1144b..18763a78fa456 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -55,10 +55,6 @@ private StartDatafeedAction() { public static class Request extends MasterNodeRequest implements ToXContentObject { - public static Request fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - public static Request parseRequest(String datafeedId, XContentParser parser) { DatafeedParams params = DatafeedParams.PARSER.apply(parser, null); if (datafeedId != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 453e7a4528a87..2fd00a5ea3983 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -62,10 +62,6 @@ public static class Request extends BaseTasksRequest implements ToXCont PARSER.declareBoolean(Request::setAllowNoMatch, ALLOW_NO_MATCH_V7); } - public static Request fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - public static Request parseRequest(String datafeedId, XContentParser parser) { Request request = PARSER.apply(parser, null); if (datafeedId != null) { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java index 0a1778a6a6f30..8caefb47382ed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Accuracy.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -36,7 +35,6 @@ import java.util.Optional; import java.util.Set; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; /** @@ -212,22 +210,6 @@ public static class Result implements EvaluationMetricResult { private static final ParseField CLASSES = new ParseField("classes"); private static final ParseField OVERALL_ACCURACY = new ParseField("overall_accuracy"); - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "accuracy_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), OVERALL_ACCURACY); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** List of per-class results. */ private final List classes; /** Fraction of documents for which predicted class equals the actual class. 
*/ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java index 6936164ceb07e..25643a6fb8b10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Precision.java @@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -44,7 +43,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; /** @@ -205,22 +203,6 @@ public static class Result implements EvaluationMetricResult { private static final ParseField CLASSES = new ParseField("classes"); private static final ParseField AVG_PRECISION = new ParseField("avg_precision"); - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "precision_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_PRECISION); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** List of per-class results. */ private final List classes; /** Average of per-class precisions. 
*/ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java index 6aaabc13c86c9..87c671df2add4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/Recall.java @@ -21,7 +21,6 @@ import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -40,7 +39,6 @@ import java.util.Optional; import java.util.Set; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; /** @@ -175,22 +173,6 @@ public static class Result implements EvaluationMetricResult { private static final ParseField CLASSES = new ParseField("classes"); private static final ParseField AVG_RECALL = new ParseField("avg_recall"); - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "recall_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_RECALL); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** List of per-class results. */ private final List classes; /** Average of per-class recalls. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/FeatureImportanceBaseline.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/FeatureImportanceBaseline.java index e1e6e2299fcc6..53547bc0fc580 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/FeatureImportanceBaseline.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/FeatureImportanceBaseline.java @@ -132,10 +132,6 @@ private static ConstructingObjectParser createParser(boolea return parser; } - public static ClassBaseline fromXContent(XContentParser parser, boolean lenient) throws IOException { - return lenient ? 
LENIENT_PARSER.parse(parser, null) : STRICT_PARSER.parse(parser, null); - } - public final Object className; public final double baseline; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/TotalFeatureImportance.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/TotalFeatureImportance.java index 75139fb5fe6a9..82de72177bc1c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/TotalFeatureImportance.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/TotalFeatureImportance.java @@ -226,10 +226,6 @@ private static ConstructingObjectParser createParser(bool return parser; } - public static ClassImportance fromXContent(XContentParser parser, boolean lenient) throws IOException { - return lenient ? LENIENT_PARSER.parse(parser, null) : STRICT_PARSER.parse(parser, null); - } - public final Object className; public final Importance importance; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/trigger/Trigger.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/trigger/Trigger.java index 34f034162f0b4..51db01c1cb2ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/trigger/Trigger.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/trigger/Trigger.java @@ -7,21 +7,11 @@ package org.elasticsearch.xpack.core.watcher.trigger; import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; public interface Trigger extends ToXContentObject { String type(); - interface Parser { - - String type(); - - T parse(XContentParser parser) throws IOException; - } - interface Builder { T build(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java index 637957b8ce66e..98b6bdf1f3250 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java @@ -184,10 +184,6 @@ public static ScheduleConfig fromXContent(XContentParser parser) throws IOExcept return PARSER.parse(parser, null); } - public static ConstructingObjectParser getParser() { - return PARSER; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java index aef62c458bda1..3ce5d61e95fdb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java @@ -135,9 +135,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static 
UpdateConnectorSyncJobErrorAction.Request parse(XContentParser parser) { - return PARSER.apply(parser, null); - } } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java index 5c3b3b25d7064..d76f2c3b788fc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java @@ -84,10 +84,6 @@ public Request( this.lastSeen = lastSeen; } - public static UpdateConnectorSyncJobIngestionStatsAction.Request parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - public String getConnectorSyncJobId() { return connectorSyncJobId; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java index 814d9a045eb29..249cf66e39458 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java @@ -154,14 +154,6 @@ public int hashCode() { return Objects.hash(queryRuleset); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_query_ruleset_response", - p -> new Response((QueryRuleset) p[0]) - ); - static { - PARSER.declareObject(constructorArg(), (p, c) -> QueryRuleset.fromXContent(c, p), QUERY_RULESET_FIELD); - } - public static Response fromXContent(String resourceName, XContentParser parser) throws IOException { return new Response(QueryRuleset.fromXContent(resourceName, parser)); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java index 83c04106edd4c..aa4a7f4375ed5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/GetSearchApplicationAction.java @@ -128,20 +128,6 @@ public Response( this.searchApp = new SearchApplication(name, indices, analyticsCollectionName, updatedAtMillis, template); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_search_application_response", - p -> new Response((SearchApplication) p[0]) - ); - public static final ParseField SEARCH_APPLICATION_FIELD = new ParseField("searchApp"); - - static { - PARSER.declareObject(constructorArg(), (p, c) -> SearchApplication.fromXContent(c, p), SEARCH_APPLICATION_FIELD); - } - - public static Response parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - public static Response fromXContent(String resourceName, XContentParser parser) throws IOException { return new Response(SearchApplication.fromXContent(resourceName, parser)); } diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java index 225b4c5d57d65..eecee5e3f8db8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java @@ -91,18 +91,6 @@ public Watch parse( return parse(name, includeStatus, false, source, now, xContentType, false, sourceSeqNo, sourcePrimaryTerm); } - public Watch parse( - String name, - boolean includeStatus, - BytesReference source, - ZonedDateTime now, - XContentType xContentType, - long sourceSeqNo, - long sourcePrimaryTerm - ) throws IOException { - return parse(name, includeStatus, false, source, now, xContentType, false, sourceSeqNo, sourcePrimaryTerm); - } - /** * Parses the watch represented by the given source. When parsing, any sensitive data that the * source might contain (e.g. passwords) will be converted to {@link Secret secrets} From c9ab20350d393cb9cdf6bc1bb83ea9f3fbeb9350 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 2 Apr 2024 06:22:54 -0700 Subject: [PATCH 056/264] Remove AwaitsFix --- .../java/org/elasticsearch/kibana/KibanaThreadPoolTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java index 5fe5bbf95ba56..0974fd6d36b18 100644 --- a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java +++ b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java @@ -29,7 +29,6 @@ protected Collection> nodePlugins() { return Set.of(KibanaPlugin.class); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testKibanaThreadPool() { runWithBlockedThreadPools(() -> { // index documents From 35b72129e3687e86d303bc28f66f87fedae746c2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 2 Apr 2024 13:37:40 +0000 Subject: [PATCH 057/264] Bump versions after 8.13.1 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index cb8062fef02b4..abde05ec7919e 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index e8920db5ba1da..53243c2c081eb 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.1 + - label: "{{matrix.image}} / 8.13.2 / 
packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.2 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.1 + BWC_VERSION: 8.13.2 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 435e709bbf072..c5b9bb830a8d6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -312,8 +312,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.13.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.1#bwcTest + - label: 8.13.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.2#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -321,7 +321,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.1 + BWC_VERSION: 8.13.2 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 5199df27912ab..2d8ace4845f4f 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,5 +30,5 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.1" + - "8.13.2" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index f31603772a7f7..89449ff7f9f2f 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - "7.17.20" - - "8.13.1" + - "8.13.2" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 3a4958e046a82..992308bd32018 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -169,6 +169,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_12_2 = new Version(8_12_02_99); public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version V_8_13_1 = new Version(8_13_01_99); + public static final Version V_8_13_2 = new Version(8_13_02_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 0a1480526c9f0..679270e90e894 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -114,3 +114,4 @@ 8.12.1,8560001 8.12.2,8560001 8.13.0,8595000 +8.13.1,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index f66cda3c08fc7..b60066601bf68 100644 --- 
a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -114,3 +114,4 @@ 8.12.1,8500010 8.12.2,8500010 8.13.0,8503000 +8.13.1,8503000 From 3c10d47e294801f09220de8c49928a3108c15fb1 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 2 Apr 2024 13:38:50 +0000 Subject: [PATCH 058/264] Prune changelogs after 8.13.1 release --- docs/changelog/106564.yaml | 5 ----- docs/changelog/106602.yaml | 5 ----- docs/changelog/106655.yaml | 5 ----- docs/changelog/106678.yaml | 6 ------ docs/changelog/106793.yaml | 7 ------- docs/changelog/106799.yaml | 5 ----- docs/changelog/106878.yaml | 5 ----- 7 files changed, 38 deletions(-) delete mode 100644 docs/changelog/106564.yaml delete mode 100644 docs/changelog/106602.yaml delete mode 100644 docs/changelog/106655.yaml delete mode 100644 docs/changelog/106678.yaml delete mode 100644 docs/changelog/106793.yaml delete mode 100644 docs/changelog/106799.yaml delete mode 100644 docs/changelog/106878.yaml diff --git a/docs/changelog/106564.yaml b/docs/changelog/106564.yaml deleted file mode 100644 index a4e986c4b7d18..0000000000000 --- a/docs/changelog/106564.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106564 -summary: Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set -area: Search -type: bug -issues: [] diff --git a/docs/changelog/106602.yaml b/docs/changelog/106602.yaml deleted file mode 100644 index 972d7b5d163d3..0000000000000 --- a/docs/changelog/106602.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106602 -summary: Raise loglevel of events related to transform lifecycle from DEBUG to INFO -area: Transform -type: enhancement -issues: [] diff --git a/docs/changelog/106655.yaml b/docs/changelog/106655.yaml deleted file mode 100644 index 98078595d5f0c..0000000000000 --- a/docs/changelog/106655.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106655 -summary: Fix Array out of bounds exception in the XLM Roberta tokenizer -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106678.yaml b/docs/changelog/106678.yaml deleted file mode 100644 index 20bf12d6d4346..0000000000000 --- a/docs/changelog/106678.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106678 -summary: Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` -area: Search -type: bug -issues: - - 105911 diff --git a/docs/changelog/106793.yaml b/docs/changelog/106793.yaml deleted file mode 100644 index cf44f5a74d621..0000000000000 --- a/docs/changelog/106793.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 106793 -summary: Fail checkpoint on missing clusters -area: Transform -type: bug -issues: - - 104533 - - 106790 diff --git a/docs/changelog/106799.yaml b/docs/changelog/106799.yaml deleted file mode 100644 index c75cd5c15e44b..0000000000000 --- a/docs/changelog/106799.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106799 -summary: Add test to exercise reduction of terms aggregation order by key -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/106878.yaml b/docs/changelog/106878.yaml deleted file mode 100644 index 585475bb5ea55..0000000000000 --- a/docs/changelog/106878.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106878 -summary: Gate reading of optional string array for bwc -area: Downsampling -type: bug -issues: [] From d8a90464f728dff49d63e2f446f2c78de395c0c5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Apr 2024 09:45:35 -0400 Subject: [PATCH 059/264] ESQL: Fix VALUES test for `text` (#106955) This fixes an error in 
the test for `text` fields running the `VALUES` agg. We have to sort the output or we'll get it in a random order. Closes #106939 Closes #106894 --- .../rest-api-spec/test/esql/80_text.yml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 17470af049a45..dcc055707b5c2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -541,10 +541,6 @@ setup: --- values: - - skip: - version: all - reason: 'AwaitsFix https://github.com/elastic/elasticsearch/issues/106939' - - requires: cluster_features: esql.agg_values reason: "values is available in 8.14+" @@ -552,7 +548,7 @@ values: - do: esql.query: body: - query: 'FROM test | STATS job = VALUES(job) | LIMIT 1' + query: 'FROM test | STATS job = VALUES(job) | EVAL job = MV_SORT(job) | LIMIT 1' - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } - length: { values: 1 } @@ -569,11 +565,11 @@ values: - do: esql.query: body: - query: 'FROM test | STATS job = VALUES(job) BY tag | SORT tag | LIMIT 10' - - match: { columns.0.name: "job" } + query: 'FROM test | STATS job = VALUES(job) BY tag | EVAL job = MV_SORT(job) | SORT tag | LIMIT 10' - - match: { columns.0.name: "tag" } + - match: { columns.0.name: "tag" } - match: { columns.0.type: "text" } - - match: { columns.1.name: "tag" } + - match: { columns.1.name: "job" } - match: { columns.1.type: "text" } - length: { values: 2 } - - match: { values.0: [ [ "Payroll Specialist", "Other" ], "baz" ] } - - match: { values.1: [ "IT Director", "foo bar" ] } + - match: { values.0: [ "baz", [ "Other", "Payroll Specialist" ] ] } + - match: { values.1: [ "foo bar", "IT Director" ] } From 959ef23c34762a273a35156e800120c7966527ab Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 14:51:56 +0100 Subject: [PATCH 060/264] Accumulate results directly in TransportGetSnapshotsAction (#106983) There's no need to return a `SnapshotsInRepo` object all the way along the chain of listeners; we can just collect the results directly and signal completion with a `Void` response.
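For illustration, a minimal sketch of the resulting pattern, using hypothetical names and plain JDK types in place of the real accumulators and ActionListener (an editor's sketch, not the production code; the actual change is in the diff below):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.Consumer;

    class SnapshotAccumulator {
        // Results go straight into shared, thread-safe accumulators instead of
        // being threaded through every listener in the chain.
        private final List<String> allSnapshotInfos = Collections.synchronizedList(new ArrayList<>());
        private final AtomicInteger totalCount = new AtomicInteger();

        // The per-repository step no longer returns a partial-results object;
        // it records what it found and completes with null, i.e. a Void response.
        void loadRepo(List<String> snapshotsInRepo, Consumer<Void> onDone) {
            allSnapshotInfos.addAll(snapshotsInRepo);
            totalCount.addAndGet(snapshotsInRepo.size());
            onDone.accept(null);
        }

        public static void main(String[] args) {
            SnapshotAccumulator acc = new SnapshotAccumulator();
            acc.loadRepo(List.of("snap-1", "snap-2"), v -> System.out.println("repo done"));
            System.out.println("total=" + acc.totalCount.get()); // prints total=2
        }
    }

Collecting directly means intermediate listeners only have to signal completion, so partial results no longer need to be merged at every level of the chain.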
--- .../get/TransportGetSnapshotsAction.java | 72 ++++++++----------- 1 file changed, 28 insertions(+), 44 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index a66b318b16258..cb4942cc0efb8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; @@ -248,26 +247,16 @@ void getMultipleReposSnapshotInfo(ActionListener listener) } }) - .andThen((l, repositoryData) -> loadSnapshotInfos(repoName, repositoryData, l)) + .andThen((l, repositoryData) -> loadSnapshotInfos(repoName, repositoryData, l)) - .addListener(new DelegatingActionListener<>(listeners.acquire()) { - @Override - public void onResponse(SnapshotsInRepo snapshotsInRepo) { - allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); - totalCount.addAndGet(snapshotsInRepo.totalCount()); - delegate.onResponse(null); - } - - @Override - public void onFailure(Exception e) { - if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { - failuresByRepository.put(repoName, elasticsearchException); - delegate.onResponse(null); - } else { - delegate.onFailure(e); - } + .addListener(listeners.acquire().delegateResponse((l, e) -> { + if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { + failuresByRepository.put(repoName, elasticsearchException); + l.onResponse(null); + } else { + l.onFailure(e); } - }); + })); } } }) @@ -284,7 +273,7 @@ private boolean skipRepository(String repositoryName) { } } - private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryData, ActionListener listener) { + private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryData, ActionListener listener) { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); if (cancellableTask.notifyIfCancelled(listener)) { @@ -322,24 +311,17 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; - listener.onResponse( - buildSimpleSnapshotInfos( - toResolve, - repo, - repositoryData, - snapshotsInProgress.forRepo(repo).stream().map(entry -> SnapshotInfo.inProgress(entry).basic()).toList() - ) + addSimpleSnapshotInfos( + toResolve, + repo, + repositoryData, + snapshotsInProgress.forRepo(repo).stream().map(entry -> SnapshotInfo.inProgress(entry).basic()).toList() ); + listener.onResponse(null); } } - /** - * Returns a list of snapshots from repository sorted by snapshot creation date - * - * @param repositoryName repository name - * @param snapshotIds snapshots for which to fetch snapshot information - */ - private void snapshots(String repositoryName, Collection snapshotIds, ActionListener listener) { + private void 
snapshots(String repositoryName, Collection snapshotIds, ActionListener listener) { if (cancellableTask.notifyIfCancelled(listener)) { return; } @@ -416,12 +398,14 @@ public void onFailure(Exception e) { } }) - .addListener(listener.safeMap(v -> - // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - applyAfterPredicate(snapshots)), executor, threadPool.getThreadContext()); + .addListener(listener.safeMap(v -> { + // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here + applyAfterPredicateAndAdd(snapshots); + return null; + }), executor, threadPool.getThreadContext()); } - private SnapshotsInRepo buildSimpleSnapshotInfos( + private void addSimpleSnapshotInfos( final Set toResolve, final String repoName, final RepositoryData repositoryData, @@ -429,7 +413,8 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( ) { if (repositoryData == null) { // only want current snapshots - return applyAfterPredicate(currentSnapshots); + applyAfterPredicateAndAdd(currentSnapshots); + return; } // else want non-current snapshots as well, which are found in the repository data List snapshotInfos = new ArrayList<>(); @@ -460,11 +445,12 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( ) ); } - return applyAfterPredicate(snapshotInfos); + applyAfterPredicateAndAdd(snapshotInfos); } - private SnapshotsInRepo applyAfterPredicate(List snapshotInfos) { - return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size()); + private void applyAfterPredicateAndAdd(List snapshotInfos) { + allSnapshotInfos.add(snapshotInfos.stream().filter(afterPredicate).toList()); + totalCount.addAndGet(snapshotInfos.size()); } private GetSnapshotsResponse buildResponse() { @@ -672,8 +658,6 @@ private static int indexCount(SnapshotId snapshotId, RepositoryData repositoryDa } } - private record SnapshotsInRepo(List snapshotInfos, int totalCount) {} - /** * Throttling executor for retrieving {@link SnapshotInfo} instances from the repository without spamming the SNAPSHOT_META threadpool * and starving other users of access to it. Similar to {@link Repository#getSnapshotInfo} but allows for finer-grained control over From 225bb2caad4b9143b4a0aaf6c2c261245aaa12bf Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 2 Apr 2024 15:58:13 +0200 Subject: [PATCH 061/264] Inject reserved role name checker into role mgmt actions (#106759) This PR makes reserved role name validation injectable for the create and delete roles actions. 
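The shape of the change, as a minimal sketch with hypothetical simplified names (the real interface is `ReservedRoleNameChecker`, added in the diff below and bound via the plugin's extension loading):

    import java.util.Set;

    // Callers depend on an interface instead of a static utility, so an
    // alternative implementation can be injected per deployment.
    interface RoleNameChecker {
        boolean isReserved(String roleName);
    }

    // Default behaviour delegates to a fixed set of names; the names here
    // are placeholders, not the actual reserved-roles list.
    class DefaultRoleNameChecker implements RoleNameChecker {
        private static final Set<String> RESERVED = Set.of("superuser", "kibana_system");

        @Override
        public boolean isReserved(String roleName) {
            return RESERVED.contains(roleName);
        }
    }

    class DeleteRoleHandler {
        private final RoleNameChecker checker; // injected, not hard-coded

        DeleteRoleHandler(RoleNameChecker checker) {
            this.checker = checker;
        }

        void deleteRole(String roleName) {
            if (checker.isReserved(roleName)) {
                throw new IllegalArgumentException("role [" + roleName + "] is reserved and cannot be deleted");
            }
            // ... proceed with deleting the role from the security index
        }
    }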
--- .../action/role/DeleteRoleRequest.java | 30 ++-- .../action/role/DeleteRoleRequestBuilder.java | 11 +- .../action/role/DeleteRoleResponse.java | 11 +- .../security/action/role/PutRoleRequest.java | 74 +++----- .../action/role/PutRoleRequestBuilder.java | 10 +- .../role/PutRoleRequestBuilderFactory.java | 6 +- .../action/role/DeleteRoleRequestTests.java | 29 +++ .../action/role/PutRoleRequestTests.java | 169 +----------------- .../xpack/security/Security.java | 9 + .../role/TransportDeleteRoleAction.java | 20 ++- .../action/role/TransportPutRoleAction.java | 18 +- .../authz/ReservedRoleNameChecker.java | 34 ++++ .../action/role/RestDeleteRoleAction.java | 1 - .../rest/action/role/RestPutRoleAction.java | 2 +- .../role/TransportDeleteRoleActionTests.java | 22 ++- .../role/TransportPutRoleActionTests.java | 16 +- 16 files changed, 191 insertions(+), 271 deletions(-) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestTests.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/ReservedRoleNameChecker.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequest.java index 9d7eff847529f..121ea62ee99e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequest.java @@ -8,9 +8,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -19,27 +20,26 @@ /** * A request delete a role from the security index */ -public class DeleteRoleRequest extends ActionRequest implements WriteRequest { +public class DeleteRoleRequest extends ActionRequest { private String name; - private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; - - public DeleteRoleRequest(StreamInput in) throws IOException { - super(in); - name = in.readString(); - refreshPolicy = RefreshPolicy.readFrom(in); - } + private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE; public DeleteRoleRequest() {} - @Override - public DeleteRoleRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + public DeleteRoleRequest setRefreshPolicy(@Nullable String refreshPolicy) { + if (refreshPolicy != null) { + setRefreshPolicy(WriteRequest.RefreshPolicy.parse(refreshPolicy)); + } + return this; + } + + public DeleteRoleRequest setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { this.refreshPolicy = refreshPolicy; return this; } - @Override - public RefreshPolicy getRefreshPolicy() { + public WriteRequest.RefreshPolicy getRefreshPolicy() { return refreshPolicy; } @@ -62,8 +62,6 @@ public String name() { @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - refreshPolicy.writeTo(out); + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestBuilder.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestBuilder.java index 93dfed1ee2906..7ab099ef48b10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestBuilder.java @@ -7,15 +7,13 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.core.Nullable; /** * A builder for requests to delete a role from the security index */ -public class DeleteRoleRequestBuilder extends ActionRequestBuilder - implements - WriteRequestBuilder { +public class DeleteRoleRequestBuilder extends ActionRequestBuilder { public DeleteRoleRequestBuilder(ElasticsearchClient client) { super(client, DeleteRoleAction.INSTANCE, new DeleteRoleRequest()); @@ -25,4 +23,9 @@ public DeleteRoleRequestBuilder name(String name) { request.name(name); return this; } + + public DeleteRoleRequestBuilder setRefreshPolicy(@Nullable String refreshPolicy) { + request.setRefreshPolicy(refreshPolicy); + return this; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleResponse.java index f1495e2fc7a59..32463f51e9e0a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleResponse.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -19,12 +19,7 @@ */ public class DeleteRoleResponse extends ActionResponse implements ToXContentObject { - private boolean found = false; - - public DeleteRoleResponse(StreamInput in) throws IOException { - super(in); - found = in.readBoolean(); - } + private final boolean found; public DeleteRoleResponse(boolean found) { this.found = found; @@ -42,7 +37,7 @@ public boolean found() { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(found); + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java index bb7fe59dce5e6..fea925f667bcf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java @@ -6,13 +6,12 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import 
org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; @@ -33,7 +32,7 @@ /** * Request object for adding a role to the security index */ -public class PutRoleRequest extends ActionRequest implements WriteRequest { +public class PutRoleRequest extends ActionRequest { private String name; private String[] clusterPrivileges = Strings.EMPTY_ARRAY; @@ -41,28 +40,10 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest indicesPrivileges = new ArrayList<>(); private List applicationPrivileges = new ArrayList<>(); private String[] runAs = Strings.EMPTY_ARRAY; - private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; + private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE; private Map metadata; private List remoteIndicesPrivileges = new ArrayList<>(); - - public PutRoleRequest(StreamInput in) throws IOException { - super(in); - name = in.readString(); - clusterPrivileges = in.readStringArray(); - int indicesSize = in.readVInt(); - indicesPrivileges = new ArrayList<>(indicesSize); - for (int i = 0; i < indicesSize; i++) { - indicesPrivileges.add(new RoleDescriptor.IndicesPrivileges(in)); - } - applicationPrivileges = in.readCollectionAsList(RoleDescriptor.ApplicationResourcePrivileges::new); - configurableClusterPrivileges = ConfigurableClusterPrivileges.readArray(in); - runAs = in.readStringArray(); - refreshPolicy = RefreshPolicy.readFrom(in); - metadata = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - remoteIndicesPrivileges = in.readCollectionAsList(RoleDescriptor.RemoteIndicesPrivileges::new); - } - } + private boolean restrictRequest = false; public PutRoleRequest() {} @@ -96,6 +77,14 @@ public void addRemoteIndex(RoleDescriptor.RemoteIndicesPrivileges... privileges) remoteIndicesPrivileges.addAll(Arrays.asList(privileges)); } + public void restrictRequest(boolean restrictRequest) { + this.restrictRequest = restrictRequest; + } + + public boolean restrictRequest() { + return restrictRequest; + } + public void addRemoteIndex( final String[] remoteClusters, final String[] indices, @@ -145,17 +134,18 @@ public void runAs(String... usernames) { this.runAs = usernames; } - @Override - public PutRoleRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + public PutRoleRequest setRefreshPolicy(@Nullable String refreshPolicy) { + if (refreshPolicy != null) { + setRefreshPolicy(WriteRequest.RefreshPolicy.parse(refreshPolicy)); + } + return this; + } + + public PutRoleRequest setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { this.refreshPolicy = refreshPolicy; return this; } - /** - * Should this request trigger a refresh ({@linkplain RefreshPolicy#IMMEDIATE}, the default), wait for a refresh ( - * {@linkplain RefreshPolicy#WAIT_UNTIL}), or proceed ignore refreshes entirely ({@linkplain RefreshPolicy#NONE}). 
- */ - @Override public WriteRequest.RefreshPolicy getRefreshPolicy() { return refreshPolicy; } @@ -202,29 +192,7 @@ public Map metadata() { @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(name); - out.writeStringArray(clusterPrivileges); - out.writeVInt(indicesPrivileges.size()); - for (RoleDescriptor.IndicesPrivileges index : indicesPrivileges) { - index.writeTo(out); - } - out.writeCollection(applicationPrivileges); - ConfigurableClusterPrivileges.writeArray(out, this.configurableClusterPrivileges); - out.writeStringArray(runAs); - refreshPolicy.writeTo(out); - out.writeGenericMap(metadata); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - out.writeCollection(remoteIndicesPrivileges); - } else if (hasRemoteIndicesPrivileges()) { - throw new IllegalArgumentException( - "versions of Elasticsearch before [" - + TransportVersions.V_8_8_0 - + "] can't handle remote indices privileges and attempted to send to [" - + out.getTransportVersion() - + "]" - ); - } + TransportAction.localOnly(); } public RoleDescriptor roleDescriptor() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index d07187b967929..f389a39df7979 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Nullable; @@ -20,9 +19,7 @@ /** * Builder for requests to add a role to the administrative index */ -public class PutRoleRequestBuilder extends ActionRequestBuilder - implements - WriteRequestBuilder { +public class PutRoleRequestBuilder extends ActionRequestBuilder { public PutRoleRequestBuilder(ElasticsearchClient client) { super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); @@ -78,4 +75,9 @@ public PutRoleRequestBuilder metadata(Map metadata) { request.metadata(metadata); return this; } + + public PutRoleRequestBuilder setRefreshPolicy(@Nullable String refreshPolicy) { + request.setRefreshPolicy(refreshPolicy); + return this; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilderFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilderFactory.java index f965ec754a404..169bd5d4cc1f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilderFactory.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilderFactory.java @@ -9,14 +9,12 @@ import org.elasticsearch.client.internal.Client; -import java.util.function.Predicate; - public interface PutRoleRequestBuilderFactory { - PutRoleRequestBuilder create(Client client, boolean restrictRequest, Predicate fileRolesStoreNameChecker); + PutRoleRequestBuilder create(Client client, boolean restrictRequest); class Default implements PutRoleRequestBuilderFactory 
{ @Override - public PutRoleRequestBuilder create(Client client, boolean restrictRequest, Predicate fileRolesStoreNameChecker) { + public PutRoleRequestBuilder create(Client client, boolean restrictRequest) { // by default, we don't apply extra restrictions to Put Role requests and don't require checks against file-based roles // these dependencies are only used by our stateless implementation return new PutRoleRequestBuilder(client); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestTests.java new file mode 100644 index 0000000000000..0552c58de2ebf --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestTests.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.role; + +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class DeleteRoleRequestTests extends ESTestCase { + + public void testSetRefreshPolicy() { + final DeleteRoleRequest request = new DeleteRoleRequest(); + final String refreshPolicy = randomFrom( + WriteRequest.RefreshPolicy.IMMEDIATE.getValue(), + WriteRequest.RefreshPolicy.WAIT_UNTIL.getValue() + ); + request.setRefreshPolicy(refreshPolicy); + assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy)); + + request.setRefreshPolicy((String) null); + assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index eebf4ab46b2dd..8accbc1ff617e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -6,35 +6,15 @@ */ package org.elasticsearch.xpack.core.security.action.role; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TransportVersionUtils; -import org.elasticsearch.xpack.core.XPackClientPlugin; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; -import 
org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.support.NativeRealmValidationUtil; import org.junit.BeforeClass; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; import java.util.Locale; -import java.util.Map; -import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -167,60 +147,17 @@ public void testValidationOfApplicationPrivileges() { ); } - public void testSerialization() throws IOException { - final BytesStreamOutput out = new BytesStreamOutput(); - if (randomBoolean()) { - final TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); - logger.info("Serializing with version {}", version); - out.setTransportVersion(version); - } - final boolean mayIncludeRemoteIndices = out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); - final PutRoleRequest original = buildRandomRequest(mayIncludeRemoteIndices); - original.writeTo(out); - - final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); - StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); - in.setTransportVersion(out.getTransportVersion()); - final PutRoleRequest copy = new PutRoleRequest(in); - - final RoleDescriptor actual = copy.roleDescriptor(); - final RoleDescriptor expected = original.roleDescriptor(); - assertThat(actual, equalTo(expected)); - } - - public void testSerializationWithRemoteIndicesThrowsOnUnsupportedVersions() throws IOException { - final BytesStreamOutput out = new BytesStreamOutput(); - final TransportVersion versionBeforeAdvancedRemoteClusterSecurity = TransportVersionUtils.getPreviousVersion( - TransportVersions.V_8_8_0 - ); - final TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_17_0, - versionBeforeAdvancedRemoteClusterSecurity + public void testSetRefreshPolicy() { + final PutRoleRequest request = new PutRoleRequest(); + final String refreshPolicy = randomFrom( + WriteRequest.RefreshPolicy.IMMEDIATE.getValue(), + WriteRequest.RefreshPolicy.WAIT_UNTIL.getValue() ); - out.setTransportVersion(version); + request.setRefreshPolicy(refreshPolicy); + assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy)); - final PutRoleRequest original = buildRandomRequest(randomBoolean()); - if (original.hasRemoteIndicesPrivileges()) { - final var ex = expectThrows(IllegalArgumentException.class, () -> original.writeTo(out)); - assertThat( - ex.getMessage(), - containsString( - "versions of Elasticsearch before [" - + TransportVersions.V_8_8_0 - + "] can't handle remote indices privileges and attempted to send to [" - + version - + "]" - ) - ); - } else { - original.writeTo(out); - final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); - StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); - in.setTransportVersion(out.getTransportVersion()); - final PutRoleRequest copy = new PutRoleRequest(in); - assertThat(copy.roleDescriptor(), equalTo(original.roleDescriptor())); - } + 
request.setRefreshPolicy((String) null); + assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy)); } private void assertSuccessfulValidation(PutRoleRequest request) { @@ -245,92 +182,4 @@ private PutRoleRequest buildRequestWithApplicationPrivilege(String appName, Stri request.addApplicationPrivileges(privilege); return request; } - - private PutRoleRequest buildRandomRequest(boolean allowRemoteIndices) { - final PutRoleRequest request = new PutRoleRequest(); - request.name(randomAlphaOfLengthBetween(4, 9)); - - request.cluster( - randomSubsetOf(Arrays.asList("monitor", "manage", "all", "manage_security", "manage_ml", "monitor_watcher")).toArray( - Strings.EMPTY_ARRAY - ) - ); - - for (int i = randomIntBetween(0, 4); i > 0; i--) { - request.addIndex( - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), false, false), - randomSubsetOf(randomIntBetween(1, 2), "read", "write", "index", "all").toArray(Strings.EMPTY_ARRAY), - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true), - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true), - null, - randomBoolean() - ); - } - - if (allowRemoteIndices) { - for (int i = randomIntBetween(0, 4); i > 0; i--) { - request.addRemoteIndex( - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), false, false), - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), false, false), - randomSubsetOf(randomIntBetween(1, 2), "read", "write", "index", "all").toArray(Strings.EMPTY_ARRAY), - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true), - generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true), - null, - randomBoolean() - ); - } - } - - final Supplier stringWithInitialLowercase = () -> randomAlphaOfLength(1).toLowerCase(Locale.ROOT) - + randomAlphaOfLengthBetween(3, 12); - final ApplicationResourcePrivileges[] applicationPrivileges = new ApplicationResourcePrivileges[randomIntBetween(0, 5)]; - for (int i = 0; i < applicationPrivileges.length; i++) { - applicationPrivileges[i] = ApplicationResourcePrivileges.builder() - .application(stringWithInitialLowercase.get()) - .privileges(randomArray(1, 3, String[]::new, stringWithInitialLowercase)) - .resources(generateRandomStringArray(5, randomIntBetween(3, 8), false, false)) - .build(); - } - request.addApplicationPrivileges(applicationPrivileges); - switch (randomIntBetween(0, 3)) { - case 0: - request.conditionalCluster(new ConfigurableClusterPrivilege[0]); - break; - case 1: - request.conditionalCluster( - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) - ) - ); - break; - case 2: - request.conditionalCluster( - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) - ) - ); - break; - case 3: - request.conditionalCluster( - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) - ), - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) - ) - ); - break; - } - - request.runAs(generateRandomStringArray(4, 3, false, true)); - - final Map metadata = new HashMap<>(); - for (String key : generateRandomStringArray(3, 5, false, true)) { - metadata.put(key, randomFrom(Boolean.TRUE, 
Boolean.FALSE, 1, 2, randomAlphaOfLengthBetween(2, 9))); - } - request.metadata(metadata); - - request.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); - return request; - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 762d5dbf1a501..4fe4b35683343 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -71,6 +71,7 @@ import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -294,6 +295,7 @@ import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.DlsFlsRequestCacheDifferentiator; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.interceptor.BulkShardRequestInterceptor; @@ -584,6 +586,7 @@ public class Security extends Plugin private final SetOnce client = new SetOnce<>(); private final SetOnce> reloadableComponents = new SetOnce<>(); private final SetOnce authorizationDenialMessages = new SetOnce<>(); + private final SetOnce reservedRoleNameCheckerFactory = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -851,6 +854,11 @@ Collection createComponents( if (hasPrivilegesRequestBuilderFactory.get() == null) { hasPrivilegesRequestBuilderFactory.trySet(new HasPrivilegesRequestBuilderFactory.Default()); } + if (reservedRoleNameCheckerFactory.get() == null) { + reservedRoleNameCheckerFactory.set(new ReservedRoleNameChecker.Factory.Default()); + } + final ReservedRoleNameChecker reservedRoleNameChecker = reservedRoleNameCheckerFactory.get().create(fileRolesStore.get()::exists); + components.add(new PluginComponentBinding<>(ReservedRoleNameChecker.class, reservedRoleNameChecker)); final Map, ActionListener>>> customRoleProviders = new LinkedHashMap<>(); for (SecurityExtension extension : securityExtensions) { @@ -2106,6 +2114,7 @@ public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, createApiKeyRequestBuilderFactory, CreateApiKeyRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); + loadSingletonExtensionAndSetOnce(loader, reservedRoleNameCheckerFactory, ReservedRoleNameChecker.Factory.class); } private void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce setOnce, Class clazz) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java index 
e76e60574f7aa..5548fc7497fd3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java @@ -9,30 +9,36 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; -import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -public class TransportDeleteRoleAction extends HandledTransportAction { +public class TransportDeleteRoleAction extends TransportAction { private final NativeRolesStore rolesStore; + private final ReservedRoleNameChecker reservedRoleNameChecker; @Inject - public TransportDeleteRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) { - super(DeleteRoleAction.NAME, transportService, actionFilters, DeleteRoleRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + public TransportDeleteRoleAction( + ActionFilters actionFilters, + NativeRolesStore rolesStore, + TransportService transportService, + ReservedRoleNameChecker reservedRoleNameChecker + ) { + super(DeleteRoleAction.NAME, actionFilters, transportService.getTaskManager()); this.rolesStore = rolesStore; + this.reservedRoleNameChecker = reservedRoleNameChecker; } @Override protected void doExecute(Task task, DeleteRoleRequest request, ActionListener listener) { - if (ReservedRolesStore.isReserved(request.name())) { + if (reservedRoleNameChecker.isReserved(request.name())) { listener.onFailure(new IllegalArgumentException("role [" + request.name() + "] is reserved and cannot be deleted")); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java index b85ef3f3c819f..87b9bb72884be 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java @@ -10,9 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -20,23 +19,29 @@ import 
org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -public class TransportPutRoleAction extends HandledTransportAction { +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class TransportPutRoleAction extends TransportAction { private final NativeRolesStore rolesStore; private final NamedXContentRegistry xContentRegistry; + private final ReservedRoleNameChecker reservedRoleNameChecker; @Inject public TransportPutRoleAction( ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService, - NamedXContentRegistry xContentRegistry + NamedXContentRegistry xContentRegistry, + ReservedRoleNameChecker reservedRoleNameChecker ) { - super(PutRoleAction.NAME, transportService, actionFilters, PutRoleRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(PutRoleAction.NAME, actionFilters, transportService.getTaskManager()); this.rolesStore = rolesStore; this.xContentRegistry = xContentRegistry; + this.reservedRoleNameChecker = reservedRoleNameChecker; } @Override @@ -62,6 +67,9 @@ private Exception validateRequest(final PutRoleRequest request) { if (validationException != null) { return validationException; } + if (reservedRoleNameChecker.isReserved(request.name())) { + throw addValidationError("Role [" + request.name() + "] is reserved and may not be used.", null); + } try { DLSRoleQueryValidator.validateQueryField(request.roleDescriptor().getIndicesPrivileges(), xContentRegistry); } catch (ElasticsearchException | IllegalArgumentException e) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/ReservedRoleNameChecker.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/ReservedRoleNameChecker.java new file mode 100644 index 0000000000000..a34096bb7d630 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/ReservedRoleNameChecker.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.authz; + +import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; + +import java.util.function.Predicate; + +public interface ReservedRoleNameChecker { + interface Factory { + ReservedRoleNameChecker create(Predicate fileRoleStoreNameChecker); + + class Default implements Factory { + @Override + public ReservedRoleNameChecker create(Predicate fileRoleStoreNameChecker) { + return new ReservedRoleNameChecker.Default(); + } + } + } + + boolean isReserved(String roleName); + + class Default implements ReservedRoleNameChecker { + @Override + public boolean isReserved(String roleName) { + return ReservedRolesStore.isReserved(roleName); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java index cf5e4d12e7b37..0c3f0364e60d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestDeleteRoleAction.java @@ -51,7 +51,6 @@ public String getName() { public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { final String name = request.param("name"); final String refresh = request.param("refresh"); - return channel -> new DeleteRoleRequestBuilder(client).name(name) .setRefreshPolicy(refresh) .execute(new RestBuilderListener<>(channel) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java index 4be786177af65..75b2435fa7505 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java @@ -64,7 +64,7 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { final boolean restrictRequest = request.hasParam(RestRequest.PATH_RESTRICTED); - final PutRoleRequestBuilder requestBuilder = builderFactory.create(client, restrictRequest, fileRolesStore::exists) + final PutRoleRequestBuilder requestBuilder = builderFactory.create(client, restrictRequest) .source(request.param("name"), request.requiredContent(), request.getXContentType()) .setRefreshPolicy(request.param("refresh")); return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index 2e969d36f1956..84e4dc402c767 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import 
org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.junit.BeforeClass; @@ -61,7 +62,12 @@ public void testReservedRole() { null, Collections.emptySet() ); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportDeleteRoleAction action = new TransportDeleteRoleAction( + mock(ActionFilters.class), + rolesStore, + transportService, + new ReservedRoleNameChecker.Default() + ); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -105,7 +111,12 @@ private void testValidRole(String roleName) { null, Collections.emptySet() ); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportDeleteRoleAction action = new TransportDeleteRoleAction( + mock(ActionFilters.class), + rolesStore, + transportService, + new ReservedRoleNameChecker.Default() + ); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -153,7 +164,12 @@ public void testException() { null, Collections.emptySet() ); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportDeleteRoleAction action = new TransportDeleteRoleAction( + mock(ActionFilters.class), + rolesStore, + transportService, + new ReservedRoleNameChecker.Default() + ); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 38e042231afd6..8610273f205c9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.junit.BeforeClass; @@ -107,7 +108,8 @@ public void testReservedRole() { mock(ActionFilters.class), rolesStore, transportService, - xContentRegistry() + xContentRegistry(), + new ReservedRoleNameChecker.Default() ); PutRoleRequest request = new PutRoleRequest(); @@ -156,7 +158,8 @@ private void testValidRole(String roleName) { mock(ActionFilters.class), rolesStore, transportService, - xContentRegistry() + xContentRegistry(), + new ReservedRoleNameChecker.Default() ); final boolean created = randomBoolean(); @@ -209,7 +212,8 @@ public void testException() { mock(ActionFilters.class), rolesStore, transportService, - xContentRegistry() + xContentRegistry(), + new ReservedRoleNameChecker.Default() ); PutRoleRequest request = new PutRoleRequest(); @@ -259,7 +263,8 @@ public void testCreationOfRoleWithMalformedQueryJsonFails() { mock(ActionFilters.class), rolesStore, transportService, - xContentRegistry() + xContentRegistry(), + new ReservedRoleNameChecker.Default() ); PutRoleRequest 
request = new PutRoleRequest(); request.name("test"); @@ -314,7 +319,8 @@ public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception { mock(ActionFilters.class), rolesStore, transportService, - xContentRegistry() + xContentRegistry(), + new ReservedRoleNameChecker.Default() ); PutRoleRequest request = new PutRoleRequest(); request.name("test"); From e8a3c9b9a3de121bd6703ccf42b959527f4108ed Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 2 Apr 2024 07:12:42 -0700 Subject: [PATCH 062/264] AwaitsFix #106968 --- .../reservedstate/service/FileSettingsServiceTests.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index b309f10903d09..53ca55f8a5f81 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.reservedstate.service; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -54,6 +55,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -253,7 +255,6 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { deadThreadLatch.countDown(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public void testStopWorksIfProcessingDidntReturnYet() throws Exception { var spiedController = spy(controller); var service = new FileSettingsService(clusterService, spiedController, env); From 499ebbf8ffcebd06899db9bf409498b07385e33d Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 2 Apr 2024 16:36:56 +0200 Subject: [PATCH 063/264] ESQL: Fix issues with ST_* functions and compound geometries (#106992) * Add more tests to cartesian complex geometry tests * Support multi-shapes within constant literals This was not supported for Lucene push-down for any of the relates functions on point indices: ST_INTERSECTS, ST_WITHIN, ST_CONTAINS. In addition ST_CONTAINS had specific bugs around handling multi-component shapes within literals/constants.
* Update docs/changelog/106992.yaml * Delete docs/changelog/106992.yaml * Control result order for multi-cluster tests --- .../resources/cartesian_multipolygons.csv | 10 + .../cartesian_multipolygons.csv-spec | 65 ++++- .../src/main/resources/spatial.csv-spec | 257 +++++++++++++++++- ...ianPointDocValuesAndConstantEvaluator.java | 8 +- ...nsCartesianSourceAndConstantEvaluator.java | 8 +- ...GeoPointDocValuesAndConstantEvaluator.java | 8 +- ...ContainsGeoSourceAndConstantEvaluator.java | 8 +- .../scalar/spatial/SpatialContains.java | 37 ++- .../spatial/SpatialEvaluatorFactory.java | 38 +++ .../spatial/SpatialRelatesFunction.java | 2 +- .../scalar/spatial/SpatialRelatesUtils.java | 9 + .../querydsl/query/SpatialRelatesQuery.java | 13 +- 12 files changed, 417 insertions(+), 46 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv index e65cdd29a22b8..cbfc8048adbad 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv @@ -9,3 +9,13 @@ id:l, name:keyword, shape:cartesian_shape 7, Bottom right with holes, "POLYGON((2 0\, 3 0\, 3 1\, 2 1\, 2 0)\, (2.4 0.4\, 2.6 0.4\, 2.6 0.6\, 2.4 0.6\, 2.4 0.4))" 8, Top right with holes, "POLYGON((2 2\, 3 2\, 3 3\, 2 3\, 2 2)\, (2.4 2.4\, 2.6 2.4\, 2.6 2.6\, 2.4 2.6\, 2.4 2.4))" 9, Top left with holes, "POLYGON((0 2\, 1 2\, 1 3\, 0 3\, 0 2)\, (0.4 2.4\, 0.6 2.4\, 0.6 2.6\, 0.4 2.6\, 0.4 2.4))" +10, Four diagonals, "MULTILINESTRING((0 0\, 1 1)\, (2 0\, 3 1)\, (2 2\, 3 3)\, (0 2\, 1 3))" +11, Bottom left diagonal, "LINESTRING(0 0\, 1 1)" +12, Bottom right diagonal, "LINESTRING(2 0\, 3 1)" +13, Top right diagonal, "LINESTRING(2 2\, 3 3)" +14, Top left diagonal, "LINESTRING(0 2\, 1 3)" +15, Four points, "MULTIPOINT(0.5 0.5\, 2.5 0.5\, 2.5 2.5\, 0.5 2.5)" +16, Bottom left point, "POINT(0.5 0.5)" +17, Bottom right point, "POINT(2.5 0.5)" +18, Top right point, "POINT(2.5 2.5)" +19, Top left point, "POINT(0.5 2.5)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index c721d3c4899db..a2411cfd7a335 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -13,11 +13,15 @@ FROM cartesian_multipolygons | SORT id ; -id:l | name:keyword | shape:cartesian_shape -0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) -1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) 5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) -6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 
0.6, 0.4 0.4)) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), (2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) ; whereContainsSinglePolygon @@ -41,9 +45,11 @@ FROM cartesian_multipolygons | SORT id ; -id:l | name:keyword | shape:cartesian_shape -1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) -6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +id:l | name:keyword | shape:cartesian_shape +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +16 | Bottom left point | POINT(0.5 0.5) ; #################################################################################################### @@ -62,6 +68,10 @@ id:l | name:keyword | shape:cartesian_shape 1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) 5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) 6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), (2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) ; whereContainsSmallerPolygon @@ -86,6 +96,7 @@ FROM cartesian_multipolygons ; id:l | name:keyword | shape:cartesian_shape +16 | Bottom left point | POINT(0.5 0.5) ; #################################################################################################### @@ -110,6 +121,16 @@ id:l | name:keyword | shape:cartesian_shape 7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) 8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) 9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), (2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) ; whereContainsLargerPolygon @@ -142,6 +163,16 @@ id:l | name:keyword | shape:cartesian_shape 7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) 8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) 9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), 
(2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) ; #################################################################################################### @@ -166,6 +197,16 @@ id:l | name:keyword | shape:cartesian_shape 7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) 8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) 9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), (2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) ; whereContainsEvenLargerPolygon @@ -198,4 +239,14 @@ id:l | name:keyword | shape:cartesian_shape 7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) 8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) 9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +10 | Four diagonals | MULTILINESTRING((0 0, 1 1), (2 0, 3 1), (2 2, 3 3), (0 2, 1 3)) +11 | Bottom left diagonal | LINESTRING(0 0, 1 1) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +15 | Four points | MULTIPOINT(0.5 0.5, 2.5 0.5, 2.5 2.5, 0.5 2.5) +16 | Bottom left point | POINT(0.5 0.5) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 57554c41a6dec..c1421d91dffa5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -1063,7 +1063,7 @@ FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) ; -abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid ; @@ -1081,6 +1081,92 @@ wkt:keyword | pt:cartesian_point "POINT(1 -1)" | POINT(1 -1) ; +cartesianPointIntersectsPointShape +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT 
(4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointIntersectsPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointIntersectsMultiPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +; + +cartesianPointIntersectsLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +; + +cartesianPointIntersectsMultiLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | 2 | major +; + +cartesianPointIntersectsPointShapeWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointIntersectsPointWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + ############################################### # Tests for ST_CONTAINS on CARTESIAN_POINT type @@ -1103,7 +1189,7 @@ FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) ; -abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid ; @@ -1155,6 +1241,85 @@ ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] wkt:keyword | pt:cartesian_point ; +cartesianPointContainsPointShape +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, 
TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointContainsPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointContainsMultiPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +; + +cartesianPointContainsLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +; + +cartesianPointContainsMultiLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +; + +cartesianPointContainsPointShapeWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointContainsPointWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + ############################################### # Tests for ST_WITHIN on CARTESIAN_POINT type @@ -1177,7 +1342,7 @@ FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) ; -abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid ; @@ -1195,6 +1360,92 @@ wkt:keyword | pt:cartesian_point "POINT(1 -1)" | POINT(1 -1) ; +cartesianPointWithinPointShape +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointWithinPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +; + +abbrev:keyword | location:cartesian_point | 
name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +cartesianPointWithinMultiPoint +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +; + +cartesianPointWithinLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +; + +cartesianPointWithinMultiLineString +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) +| SORT abbrev DESC +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | 3 | major +ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | 2 | major +; + +cartesianPointWithinPointShapeWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointWithinPointWithCentroid +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + ############################################### # Tests for Equality and casting with GEO_POINT diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java index ce7e2889fc298..982bbd3b518d5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -28,12 +28,12 @@ public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator im private final EvalOperator.ExpressionEvaluator leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; private final DriverContext driverContext; 
public SpatialContainsCartesianPointDocValuesAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { this.warnings = new Warnings(source); this.leftValue = leftValue; @@ -106,10 +106,10 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Component2D rightValue) { + Component2D[] rightValue) { this.source = source; this.leftValue = leftValue; this.rightValue = rightValue; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java index 55dfbede4c003..a9c3a4f887a7d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -30,12 +30,12 @@ public final class SpatialContainsCartesianSourceAndConstantEvaluator implements private final EvalOperator.ExpressionEvaluator leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; private final DriverContext driverContext; public SpatialContainsCartesianSourceAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { this.warnings = new Warnings(source); this.leftValue = leftValue; @@ -110,10 +110,10 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Component2D rightValue) { + Component2D[] rightValue) { this.source = source; this.leftValue = leftValue; this.rightValue = rightValue; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java index 957800fb3c38e..de4537e6e0a10 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -28,12 +28,12 @@ public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implemen private final EvalOperator.ExpressionEvaluator leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; private final 
DriverContext driverContext; public SpatialContainsGeoPointDocValuesAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { this.warnings = new Warnings(source); this.leftValue = leftValue; @@ -106,10 +106,10 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Component2D rightValue) { + Component2D[] rightValue) { this.source = source; this.leftValue = leftValue; this.rightValue = rightValue; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java index 186eacc680c2c..a84c661df18d8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -30,12 +30,12 @@ public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalO private final EvalOperator.ExpressionEvaluator leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; private final DriverContext driverContext; public SpatialContainsGeoSourceAndConstantEvaluator(Source source, - EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + EvalOperator.ExpressionEvaluator leftValue, Component2D[] rightValue, DriverContext driverContext) { this.warnings = new Warnings(source); this.leftValue = leftValue; @@ -110,10 +110,10 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory leftValue; - private final Component2D rightValue; + private final Component2D[] rightValue; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, - Component2D rightValue) { + Component2D[] rightValue) { this.source = source; this.leftValue = leftValue; this.rightValue = rightValue; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 8bf33a7e3dc61..2a4915f38fb48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.GeoShapeIndexer; import org.elasticsearch.index.mapper.ShapeIndexer; import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; @@ -94,6 +95,18 @@ private boolean geometryRelatesGeometries(GeometryDocValueReader 
leftDocValueRea } return true; } + + private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponent2Ds) { + // This code path exists for doc-values points, and we could consider re-using the point class to reduce garbage creation + Point point = spatialCoordinateType.longAsPoint(encoded); + for (Component2D rightComponent2D : rightComponent2Ds) { + // Every component of the right geometry must be contained within the left geometry for this to pass + if (pointRelatesGeometry(point, rightComponent2D) == false) { + return false; + } + } + return true; + } } @FunctionInfo( @@ -189,7 +202,7 @@ public SpatialRelatesFunction surrogate() { ); evaluatorMap.put( SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), - new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantArrayFactory( SpatialContainsGeoSourceAndConstantEvaluator.Factory::new ) ); @@ -202,7 +215,7 @@ public SpatialRelatesFunction surrogate() { ); evaluatorMap.put( SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), - new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantArrayFactory( SpatialContainsGeoPointDocValuesAndConstantEvaluator.Factory::new ) ); @@ -221,7 +234,7 @@ public SpatialRelatesFunction surrogate() { ); evaluatorMap.put( SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), - new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantArrayFactory( SpatialContainsCartesianSourceAndConstantEvaluator.Factory::new ) ); @@ -234,7 +247,7 @@ public SpatialRelatesFunction surrogate() { ); evaluatorMap.put( SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), - new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantArrayFactory( SpatialContainsCartesianPointDocValuesAndConstantEvaluator.Factory::new ) ); @@ -244,8 +257,8 @@ public SpatialRelatesFunction surrogate() { } @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { - return GEO.geometryRelatesGeometry(leftValue, rightValue); + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D[] rightValue) throws IOException { + return GEO.geometryRelatesGeometries(leftValue, rightValue); } @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) @@ -254,8 +267,8 @@ static boolean processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue } @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) - static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { - return GEO.pointRelatesGeometry(leftValue, rightValue); + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D[] rightValue) { + return GEO.pointRelatesGeometries(leftValue, rightValue); } @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) @@ -265,8 +278,8 @@ static boolean 
processGeoPointDocValuesAndSource(long leftValue, BytesRef rightV } @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) - static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { - return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D[] rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometries(leftValue, rightValue); } @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) @@ -275,8 +288,8 @@ static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef righ } @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) - static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { - return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D[] rightValue) { + return CARTESIAN.pointRelatesGeometries(leftValue, rightValue); } @Evaluator(extraName = "CartesianPointDocValuesAndSource") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java index cea7d926c3e39..e3bb3e8c8a3c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java @@ -18,6 +18,7 @@ import java.util.function.Function; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2Ds; /** * SpatialRelatesFunction classes, like SpatialIntersects, support various combinations of incoming types, which can be sourced from @@ -123,6 +124,10 @@ public Expression right() { } } + /** + * This evaluator factory is used when both sides are not constants or literal, and need to be evaluated. + * They could be sourced from the index, or from previous evaluators. + */ protected static class SpatialEvaluatorFactoryWithFields extends SpatialEvaluatorFactory< EvalOperator.ExpressionEvaluator.Factory, EvalOperator.ExpressionEvaluator.Factory> { @@ -145,6 +150,10 @@ public EvalOperator.ExpressionEvaluator.Factory get( } } + /** + * This evaluator factory is used when the right hand side is a constant or literal, + * and the left is sourced from the index, or from previous evaluators. + */ protected static class SpatialEvaluatorWithConstantFactory extends SpatialEvaluatorFactory< EvalOperator.ExpressionEvaluator.Factory, Component2D> { @@ -168,6 +177,35 @@ public EvalOperator.ExpressionEvaluator.Factory get( } } + /** + * This evaluator factory is used when the right hand side is a constant or literal, + * and the left is sourced from the index, or from previous evaluators. 
+ * It uses an array of Component2Ds to model the constant side for use within CONTAINS, which does not directly support multi-shapes, + so we need to split the shapes into multiple components and perform operations on each. + */ + protected static class SpatialEvaluatorWithConstantArrayFactory extends SpatialEvaluatorFactory< + EvalOperator.ExpressionEvaluator.Factory, + Component2D[]> { + + SpatialEvaluatorWithConstantArrayFactory( + TriFunction< + Source, + EvalOperator.ExpressionEvaluator.Factory, + Component2D[], + EvalOperator.ExpressionEvaluator.Factory> factoryCreator + ) { + super(factoryCreator); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier s, + Function toEvaluator + ) { + return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2Ds(s.crsType(), s.right())); + } + } + protected record SpatialEvaluatorFieldKey(DataType dataType, boolean isConstant) {} record SpatialEvaluatorKey( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index 09938558b6cce..b18a3ba4926f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -297,7 +297,7 @@ protected boolean pointRelatesGeometry(long encoded, Component2D component2D) { return pointRelatesGeometry(point, component2D); } - private boolean pointRelatesGeometry(Point point, Component2D component2D) { + protected boolean pointRelatesGeometry(Point point, Component2D component2D) { if (queryRelation == CONTAINS) { return component2D.withinPoint(point.getX(), point.getY()) == Component2D.WithinRelation.CANDIDATE; } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java index d558e1c21c045..db45a791a122e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -50,6 +50,15 @@ static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crs } } + /** + * This function is used to convert a spatial constant to an array of lucene Component2Ds. + * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D[]. + * The reason for generating an array instead of a single component is for multi-shape support with ST_CONTAINS.
+ */ + static Component2D[] asLuceneComponent2Ds(SpatialRelatesFunction.SpatialCrsType crsType, Expression expression) { + return asLuceneComponent2Ds(crsType, makeGeometryFromLiteral(expression)); + } + static Component2D[] asLuceneComponent2Ds(SpatialRelatesFunction.SpatialCrsType crsType, Geometry geometry) { if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { var luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, t -> {}); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index e67ea0cf5624f..a16c227f7f277 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -37,7 +37,6 @@ import java.io.IOException; import java.util.Objects; -import java.util.function.Consumer; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; @@ -231,16 +230,15 @@ private static org.apache.lucene.search.Query pointShapeQuery( SearchExecutionContext context ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); + if (geometry == null || geometry.isEmpty()) { + // Should never be null, but can be an empty geometry + return new MatchNoDocsQuery(); + } if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { // A point field can never contain a non-point geometry return new MatchNoDocsQuery(); } - final Consumer checker = t -> { - if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { - throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + t + " queries"); - } - }; - final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, checker); + final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); if (hasDocValues) { final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); @@ -264,6 +262,7 @@ private static org.apache.lucene.search.Query shapeShapeQuery( throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); } if (geometry == null || geometry.isEmpty()) { + // Should never be null, but can be an empty geometry return new MatchNoDocsQuery(); } final XYGeometry[] luceneGeometries; From 08c7b655a2dff7f0c66db5247a7f7fd0dd66fa88 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 16:14:22 +0100 Subject: [PATCH 064/264] Remove `ClusterStateObserver` constructor with implicit timeout (#107004) This constructor is only used in tests, and it's trappy to hide the timeout from callers like this. Inlining it to make the timeout explicit everywhere. 
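For illustration, each call site changes roughly as follows (a minimal sketch of the pattern, not the verbatim test code; the `clusterService`, `logger` and `threadContext` names stand in for whatever each caller already has in scope):

    // Before: the removed constructor silently defaulted the timeout to 60 seconds
    new ClusterStateObserver(clusterService, logger, threadContext);

    // After: the caller states the same 60-second timeout explicitly
    new ClusterStateObserver(clusterService, TimeValue.timeValueMillis(60000), logger, threadContext);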
--- .../java/org/elasticsearch/index/store/CorruptedFileIT.java | 1 + .../snapshots/SnapshotsServiceDoubleFinalizationIT.java | 1 + .../java/org/elasticsearch/cluster/ClusterStateObserver.java | 4 ---- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index a99f6c4340941..a6e82f982b576 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -439,6 +439,7 @@ public void testCorruptionOnNetworkLayer() throws InterruptedException { final var maxRetries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); new ClusterStateObserver( internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + TimeValue.timeValueMillis(60000), logger, new ThreadContext(Settings.EMPTY) ).waitForNextChange(new ClusterStateObserver.Listener() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java index 23f218130a053..854d5f39ddaad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java @@ -167,6 +167,7 @@ public void testNoDoubleFinalization() throws Exception { private PlainActionFuture setWaitForClusterState(Predicate predicate) { final var clusterStateObserver = new ClusterStateObserver( internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + TimeValue.timeValueMillis(60000), logger, new ThreadContext(Settings.EMPTY) ); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index 40ddafa498ecb..52e2c2399397e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -46,10 +46,6 @@ public class ClusterStateObserver { volatile Long startTimeMS; volatile boolean timedOut; - public ClusterStateObserver(ClusterService clusterService, Logger logger, ThreadContext contextHolder) { - this(clusterService, new TimeValue(60000), logger, contextHolder); - } - /** * @param timeout a global timeout for this observer. After it has expired the observer * will fail any existing or new #waitForNextChange calls. Set to null From ee7e25404caba295c5d7e0d83a22fb0e41a763ec Mon Sep 17 00:00:00 2001 From: Athena Brown Date: Tue, 2 Apr 2024 11:14:41 -0600 Subject: [PATCH 065/264] Remove remaining Version usages in licensing (#101800) This commit is a follow-up to the changes converting the trial license refresh logic to use `TrialLicenseVersion` (#100169), replacing the remaining uses of `Version`, which were used to ensure wire compatibility, with `TransportVersion`. Incidentally, this commit also removes code for compatibility with 7.6.0 and earlier, as those versions can no longer be in a cluster with current-version nodes.
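The key behavioral change is in StartTrialClusterTask, where the old node-version comparison becomes a cluster-feature check; roughly (a condensed sketch of the change shown in the diff below, with error messages abbreviated):

    // Before: infer compatibility by comparing the lowest and highest node versions
    if (discoveryNodes.getMaxNodeVersion().after(discoveryNodes.getSmallestNonClientNodeVersion())) {
        throw new IllegalStateException("Please ensure all nodes are on the same version ...");
    }

    // After: ask the FeatureService whether every node in the cluster supports the feature
    if (featureService.clusterHasFeature(state, License.INDEPENDENT_TRIAL_VERSION_FEATURE) == false) {
        throw new IllegalStateException("Please ensure all nodes are up to date before starting your trial");
    }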
--- .../license/ClusterStateLicenseService.java | 29 ++++++++--------- .../org/elasticsearch/license/License.java | 3 ++ .../elasticsearch/license/LicenseUtils.java | 31 ++++++++----------- .../license/SelfGeneratedLicense.java | 5 ++- .../license/StartBasicClusterTask.java | 10 +++--- .../license/StartTrialClusterTask.java | 21 ++++++------- .../StartupSelfGeneratedLicenseTask.java | 6 ++-- .../xpack/core/XPackFeatures.java | 4 ++- .../elasticsearch/xpack/core/XPackPlugin.java | 3 +- ...actClusterStateLicenseServiceTestCase.java | 11 ++++++- .../ClusterStateLicenseServiceTests.java | 11 +++++-- .../license/LicenseScheduleTests.java | 5 ++- .../license/LicenseUtilsTests.java | 3 +- 13 files changed, 77 insertions(+), 65 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java index d65fda90b87e8..7cab82559c7fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; @@ -29,6 +28,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.internal.MutableLicenseService; import org.elasticsearch.license.internal.TrialLicenseVersion; @@ -65,6 +65,7 @@ public class ClusterStateLicenseService extends AbstractLifecycleComponent private final Settings settings; private final ClusterService clusterService; + private final FeatureService featureService; /** * The xpack feature state to update when license changes are made. 
@@ -103,10 +104,12 @@ public ClusterStateLicenseService( ThreadPool threadPool, ClusterService clusterService, Clock clock, - XPackLicenseState xPacklicenseState + XPackLicenseState xPacklicenseState, + FeatureService featureService ) { this.settings = settings; this.clusterService = clusterService; + this.featureService = featureService; this.startTrialTaskQueue = clusterService.createTaskQueue( "license-service-start-trial", Priority.NORMAL, @@ -242,10 +245,15 @@ protected PutLicenseResponse newResponse(boolean acknowledged) { @Override public ClusterState execute(ClusterState currentState) throws Exception { XPackPlugin.checkReadyForXPackCustomMetadata(currentState); - final Version oldestNodeVersion = currentState.nodes().getSmallestNonClientNodeVersion(); - if (licenseIsCompatible(newLicense, oldestNodeVersion) == false) { + int maxCompatibleLicenseVersion = LicenseUtils.getMaxCompatibleLicenseVersion(); + if (maxCompatibleLicenseVersion < newLicense.version()) { throw new IllegalStateException( - "The provided license is not compatible with node version [" + oldestNodeVersion + "]" + LoggerMessageFormat.format( + "The provided license is of version [{}] but this node is only compatible with version [{}] " + + "licences or older", + newLicense.version(), + maxCompatibleLicenseVersion + ) ); } Metadata currentMetadata = currentState.metadata(); @@ -267,11 +275,6 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String clusterService.submitUnbatchedStateUpdateTask(source, task); } - private static boolean licenseIsCompatible(License license, Version version) { - final int maxVersion = LicenseUtils.getMaxLicenseVersion(version); - return license.version() <= maxVersion; - } - private boolean isAllowedLicenseType(License.LicenseType type) { logger.debug("Checking license [{}] against allowed license types: {}", type, allowedLicenseTypes); return allowedLicenseTypes.contains(type); @@ -341,7 +344,7 @@ public void startTrialLicense(PostStartTrialRequest request, final ActionListene } startTrialTaskQueue.submitTask( StartTrialClusterTask.TASK_SOURCE, - new StartTrialClusterTask(logger, clusterService.getClusterName().value(), clock, request, listener), + new StartTrialClusterTask(logger, clusterService.getClusterName().value(), clock, featureService, request, listener), null // TODO should pass in request.masterNodeTimeout() here ); } @@ -468,9 +471,7 @@ private void maybeRegisterOrUpdateLicense(ClusterState previousClusterState, Clu // auto-generate license if no licenses ever existed or if the current license is basic and // needs extended or if the license signature needs to be updated. 
this will trigger a subsequent cluster changed event if (currentClusterState.getNodes().isLocalNodeElectedMaster() - && (noLicense - || LicenseUtils.licenseNeedsExtended(currentLicense) - || LicenseUtils.signatureNeedsUpdate(currentLicense, currentClusterState.nodes()))) { + && (noLicense || LicenseUtils.licenseNeedsExtended(currentLicense) || LicenseUtils.signatureNeedsUpdate(currentLicense))) { registerOrUpdateSelfGeneratedLicense(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 6c942ab911416..2b01f4d7fa2a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -38,6 +39,8 @@ */ public class License implements ToXContentObject { + public static final NodeFeature INDEPENDENT_TRIAL_VERSION_FEATURE = new NodeFeature("license-trial-independent-version"); + public enum LicenseType { BASIC, STANDARD, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseUtils.java index fbffd808b1546..42f0ddb3f5234 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseUtils.java @@ -7,8 +7,6 @@ package org.elasticsearch.license; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.license.License.LicenseType; @@ -63,27 +61,24 @@ public static boolean licenseNeedsExtended(License license) { * Checks if the signature of a self generated license with older version needs to be * recreated with the new key */ - public static boolean signatureNeedsUpdate(License license, DiscoveryNodes currentNodes) { + public static boolean signatureNeedsUpdate(License license) { assert License.VERSION_ENTERPRISE == License.VERSION_CURRENT : "update this method when adding a new version"; String typeName = license.type(); - return (LicenseType.isBasic(typeName) || LicenseType.isTrial(typeName)) && - // only upgrade signature when all nodes are ready to deserialize the new signature - (license.version() < License.VERSION_CRYPTO_ALGORITHMS - && compatibleLicenseVersion(currentNodes) >= License.VERSION_CRYPTO_ALGORITHMS); + return (LicenseType.isBasic(typeName) || LicenseType.isTrial(typeName)) + && license.version() < License.VERSION_CRYPTO_ALGORITHMS + && getMaxCompatibleLicenseVersion() >= License.VERSION_CRYPTO_ALGORITHMS;// only upgrade signature when all nodes are ready to + // deserialize the new signature } - public static int compatibleLicenseVersion(DiscoveryNodes currentNodes) { - return getMaxLicenseVersion(currentNodes.getMinNodeVersion()); - } - - public static int getMaxLicenseVersion(Version version) { - if (version != null && version.before(Version.V_7_6_0)) { - return 
License.VERSION_CRYPTO_ALGORITHMS; - } else { - assert License.VERSION_ENTERPRISE == License.VERSION_CURRENT : "update this method when adding a new version"; - return License.VERSION_ENTERPRISE; - } + /** + * Gets the maximum license version this cluster is compatible with. This is semantically different from {@link License#VERSION_CURRENT} + * as that field is the maximum that can be handled _by this node_, whereas this method determines the maximum license version + * that can be handled _by this cluster_. + */ + public static int getMaxCompatibleLicenseVersion() { + assert License.VERSION_ENTERPRISE == License.VERSION_CURRENT : "update this method when adding a new version"; + return License.VERSION_ENTERPRISE; } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/SelfGeneratedLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/SelfGeneratedLicense.java index 01a3298074669..6758d7ce1d708 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/SelfGeneratedLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/SelfGeneratedLicense.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.license; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -27,8 +26,8 @@ class SelfGeneratedLicense { - public static License create(License.Builder specBuilder, DiscoveryNodes currentNodes) { - return create(specBuilder, LicenseUtils.compatibleLicenseVersion(currentNodes)); + public static License create(License.Builder specBuilder) { + return create(specBuilder, LicenseUtils.getMaxCompatibleLicenseVersion()); } public static License create(License.Builder specBuilder, int version) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java index 1953a31c452ab..95b24d586161a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartBasicClusterTask.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.core.Nullable; import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; @@ -51,7 +50,6 @@ public class StartBasicClusterTask implements ClusterStateTaskListener { public LicensesMetadata execute( LicensesMetadata currentLicensesMetadata, - DiscoveryNodes discoveryNodes, ClusterStateTaskExecutor.TaskContext taskContext ) throws Exception { assert taskContext.getTask() == this; @@ -62,7 +60,7 @@ public LicensesMetadata execute( License currentLicense = LicensesMetadata.extractLicense(currentLicensesMetadata); final LicensesMetadata updatedLicensesMetadata; if (shouldGenerateNewBasicLicense(currentLicense)) { - License selfGeneratedLicense = generateBasicLicense(discoveryNodes); + License selfGeneratedLicense = generateBasicLicense(); if (request.isAcknowledged() == false && currentLicense != null) { Map ackMessageMap = LicenseUtils.getAckMessages(selfGeneratedLicense, currentLicense); if (ackMessageMap.isEmpty() == false) { @@ -104,7 +102,7 @@ private static boolean 
shouldGenerateNewBasicLicense(License currentLicense) { || LicenseSettings.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS != LicenseUtils.getExpiryDate(currentLicense); } - private License generateBasicLicense(DiscoveryNodes discoveryNodes) { + private License generateBasicLicense() { final License.Builder specBuilder = License.builder() .uid(UUID.randomUUID().toString()) .issuedTo(clusterName) @@ -113,7 +111,7 @@ private License generateBasicLicense(DiscoveryNodes discoveryNodes) { .type(License.LicenseType.BASIC) .expiryDate(LicenseSettings.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS); - return SelfGeneratedLicense.create(specBuilder, discoveryNodes); + return SelfGeneratedLicense.create(specBuilder); } public String getDescription() { @@ -129,7 +127,7 @@ public ClusterState execute(BatchExecutionContext batchEx var currentLicensesMetadata = originalLicensesMetadata; for (final var taskContext : batchExecutionContext.taskContexts()) { try (var ignored = taskContext.captureResponseHeaders()) { - currentLicensesMetadata = taskContext.getTask().execute(currentLicensesMetadata, initialState.nodes(), taskContext); + currentLicensesMetadata = taskContext.getTask().execute(currentLicensesMetadata, taskContext); } } if (currentLicensesMetadata == originalLicensesMetadata) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java index 02b4bc15eaaee..67731b03d3e65 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartTrialClusterTask.java @@ -12,8 +12,8 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.license.internal.TrialLicenseVersion; import org.elasticsearch.xpack.core.XPackPlugin; @@ -41,11 +41,13 @@ public class StartTrialClusterTask implements ClusterStateTaskListener { private final PostStartTrialRequest request; private final ActionListener listener; private final Clock clock; + private final FeatureService featureService; StartTrialClusterTask( Logger logger, String clusterName, Clock clock, + FeatureService featureService, PostStartTrialRequest request, ActionListener listener ) { @@ -54,22 +56,17 @@ public class StartTrialClusterTask implements ClusterStateTaskListener { this.request = request; this.listener = listener; this.clock = clock; + this.featureService = featureService; } private LicensesMetadata execute( LicensesMetadata currentLicensesMetadata, - DiscoveryNodes discoveryNodes, + ClusterState state, ClusterStateTaskExecutor.TaskContext taskContext ) { assert taskContext.getTask() == this; - if (discoveryNodes.getMaxNodeVersion().after(discoveryNodes.getSmallestNonClientNodeVersion())) { - throw new IllegalStateException( - "Please ensure all nodes are on the same version before starting your trial, the highest node version in this cluster is [" - + discoveryNodes.getMaxNodeVersion() - + "] and the lowest node version is [" - + discoveryNodes.getMinNodeVersion() - + "]" - ); + if (featureService.clusterHasFeature(state, License.INDEPENDENT_TRIAL_VERSION_FEATURE) == false) { + throw new IllegalStateException("Please ensure all nodes are up to 
date before starting your trial"); } final var listener = ActionListener.runBefore(this.listener, () -> { logger.debug("started self generated trial license: {}", currentLicensesMetadata); @@ -96,7 +93,7 @@ private LicensesMetadata execute( } else { specBuilder.maxNodes(LicenseSettings.SELF_GENERATED_LICENSE_MAX_NODES); } - License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, discoveryNodes); + License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder); LicensesMetadata newLicensesMetadata = new LicensesMetadata(selfGeneratedLicense, TrialLicenseVersion.CURRENT); taskContext.success(() -> listener.onResponse(new PostStartTrialResponse(PostStartTrialResponse.Status.UPGRADED_TO_TRIAL))); return newLicensesMetadata; @@ -124,7 +121,7 @@ public ClusterState execute(BatchExecutionContext batchEx var currentLicensesMetadata = originalLicensesMetadata; for (final var taskContext : batchExecutionContext.taskContexts()) { try (var ignored = taskContext.captureResponseHeaders()) { - currentLicensesMetadata = taskContext.getTask().execute(currentLicensesMetadata, initialState.nodes(), taskContext); + currentLicensesMetadata = taskContext.getTask().execute(currentLicensesMetadata, initialState, taskContext); } } if (currentLicensesMetadata == originalLicensesMetadata) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java index 518b45dd027ad..8d7dbe77e119f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java @@ -60,7 +60,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { LicenseSettings.SELF_GENERATED_LICENSE_TYPE.get(settings) ); return updateWithLicense(currentState, type); - } else if (LicenseUtils.signatureNeedsUpdate(currentLicensesMetadata.getLicense(), currentState.nodes())) { + } else if (LicenseUtils.signatureNeedsUpdate(currentLicensesMetadata.getLicense())) { return updateLicenseSignature(currentState, currentLicensesMetadata); } else if (LicenseUtils.licenseNeedsExtended(currentLicensesMetadata.getLicense())) { return extendBasic(currentState, currentLicensesMetadata); @@ -86,7 +86,7 @@ private ClusterState updateLicenseSignature(ClusterState currentState, LicensesM .issueDate(issueDate) .type(type) .expiryDate(expiryDate); - License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, currentState.nodes()); + License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder); TrialLicenseVersion trialVersion = currentLicenseMetadata.getMostRecentTrialVersion(); LicensesMetadata newLicenseMetadata = new LicensesMetadata(selfGeneratedLicense, trialVersion); mdBuilder.putCustom(LicensesMetadata.TYPE, newLicenseMetadata); @@ -149,7 +149,7 @@ private ClusterState updateWithLicense(ClusterState currentState, License.Licens .issueDate(issueDate) .type(type) .expiryDate(expiryDate); - License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder, currentState.nodes()); + License selfGeneratedLicense = SelfGeneratedLicense.create(specBuilder); LicensesMetadata licensesMetadata; if (License.LicenseType.TRIAL.equals(type)) { licensesMetadata = new LicensesMetadata(selfGeneratedLicense, TrialLicenseVersion.CURRENT); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index 887b40b5ab64b..13404772e79a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.license.License; import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; import java.util.Map; @@ -23,7 +24,8 @@ public class XPackFeatures implements FeatureSpecification { @Override public Set getFeatures() { return Set.of( - NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE // Added in 8.12 + NodesDataTiersUsageTransportAction.LOCALLY_PRECALCULATED_STATS_FEATURE, // Added in 8.12 + License.INDEPENDENT_TRIAL_VERSION_FEATURE // 8.14.0 ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 72b8a8b9e4d98..1826146a5c7c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -329,7 +329,8 @@ public Collection createComponents(PluginServices services) { services.threadPool(), services.clusterService(), getClock(), - getLicenseState() + getLicenseState(), + services.featureService() ); setLicenseService(licenseService); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java index fc257258eb213..27e5c1213f1f9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -26,6 +27,7 @@ import org.junit.After; import org.junit.Before; +import java.util.List; import java.util.stream.Stream; import static java.util.Collections.emptySet; @@ -64,7 +66,14 @@ protected void setInitialState(License license, XPackLicenseState licenseState, protected void setInitialState(License license, XPackLicenseState licenseState, Settings settings, String selfGeneratedType) { licenseType = selfGeneratedType; settings = Settings.builder().put(settings).put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), licenseType).build(); - licenseService = new ClusterStateLicenseService(settings, threadPool, clusterService, clock, licenseState); + licenseService = new ClusterStateLicenseService( + settings, + threadPool, + clusterService, + clock, + licenseState, + new FeatureService(List.of()) + ); ClusterState state = mock(ClusterState.class); final ClusterBlocks noBlock = ClusterBlocks.builder().build(); when(state.blocks()).thenReturn(noBlock); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/ClusterStateLicenseServiceTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/ClusterStateLicenseServiceTests.java index c1a8c0a10a9f7..33f162a06f350 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/ClusterStateLicenseServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/ClusterStateLicenseServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.license.licensor.LicenseSigner; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; @@ -95,7 +96,8 @@ public void testLogExpirationWarning() { mock(ThreadPool.class), mockDefaultClusterService(), mock(Clock.class), - mock(XPackLicenseState.class) + mock(XPackLicenseState.class), + new FeatureService(List.of()) ); final String message = service.buildExpirationMessage(time, expired).toString(); if (expired) { @@ -190,7 +192,8 @@ public void testStartBasicStartsNewLicenseIfFieldsDifferent() throws Exception { mock(ThreadPool.class), clusterService, clock, - mock(XPackLicenseState.class) + mock(XPackLicenseState.class), + new FeatureService(List.of()) ); verify(clusterService).createTaskQueue(eq("license-service-start-basic"), any(), taskExecutorCaptor.capture()); @@ -276,12 +279,14 @@ private void tryRegisterLicense(Settings baseSettings, License license, Consumer final Clock clock = randomBoolean() ? Clock.systemUTC() : Clock.systemDefaultZone(); final XPackLicenseState licenseState = mock(XPackLicenseState.class); final ThreadPool threadPool = mock(ThreadPool.class); + final ClusterStateLicenseService service = new ClusterStateLicenseService( settings, threadPool, clusterService, clock, - licenseState + licenseState, + new FeatureService(List.of()) ); final PutLicenseRequest request = new PutLicenseRequest(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseScheduleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseScheduleTests.java index 78bd500b55e24..d7b308bc51f46 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseScheduleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseScheduleTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; @@ -17,6 +18,7 @@ import java.time.Clock; import java.time.Instant; import java.time.format.DateTimeFormatter; +import java.util.List; import java.util.Locale; import static org.hamcrest.Matchers.equalTo; @@ -35,7 +37,8 @@ public void setup() throws Exception { mock(ThreadPool.class), mock(ClusterService.class), mock(Clock.class), - mock(XPackLicenseState.class) + mock(XPackLicenseState.class), + new FeatureService(List.of()) ); schedule = service.nextLicenseCheck(license); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseUtilsTests.java index 7704c3b397b0d..5f7c6761312d1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseUtilsTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseUtilsTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.license; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.UUIDs; import org.elasticsearch.license.internal.XPackLicenseStatus; import org.elasticsearch.test.ESTestCase; @@ -44,7 +43,7 @@ public void testIsLicenseExpiredException() { } public void testVersionsUpToDate() { - assertThat(LicenseUtils.compatibleLicenseVersion(DiscoveryNodes.EMPTY_NODES), equalTo(License.VERSION_CURRENT)); + assertThat(LicenseUtils.getMaxCompatibleLicenseVersion(), equalTo(License.VERSION_CURRENT)); } public void testGetXPackLicenseStatus() { From 646ac4fcde523e19fe9002c462608438d448d3a3 Mon Sep 17 00:00:00 2001 From: Efe Gürkan YALAMAN Date: Tue, 2 Apr 2024 19:20:39 +0200 Subject: [PATCH 066/264] Update license acknowledgement messages for Search (#106857) License state messages are shown to the user when the license expires or the license state changes. Updated the Search text to include the current features. --- .../java/org/elasticsearch/license/XPackLicenseState.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index ed240c98c0b64..120ef76561a61 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -225,7 +225,10 @@ private static String[] enterpriseSearchAcknowledgementMessages(OperationMode cu case TRIAL: case PLATINUM: case ENTERPRISE: - return new String[] { "Search Applications and behavioral analytics will be disabled" }; + return new String[] { + "Search Applications and behavioral analytics will be disabled.", + "Elastic Web crawler will be disabled.", + "Connector clients require at least a platinum license."
}; } break; } From 412f2f561716670b0864d8bfc8e97f263413004b Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 2 Apr 2024 21:53:19 +0200 Subject: [PATCH 067/264] New Lucene Spanish plural stemmer (#106952) Add Lucene SpanishPluralStemFilter based on apache/lucene#11284
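Once the `spanish_plural` key is wired up, the new filter can be exercised directly against the Lucene analysis chain. Below is a minimal sketch, not part of this change: it assumes the Lucene 9.x analysis APIs and the `SpanishPluralStemFilter(TokenStream)` constructor used in the factory hunk that follows; the class name, sample input, and expected output are illustrative.

```java
import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.es.SpanishPluralStemFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class SpanishPluralStemDemo {
    public static void main(String[] args) throws IOException {
        // Tokenize a toy input; any Tokenizer works here.
        Tokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("casas libros"));
        // StemmerTokenFilterFactory below maps the "spanish_plural" language key to this filter.
        try (TokenStream stream = new SpanishPluralStemFilter(tokenizer)) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                System.out.println(term.toString()); // expected stems, e.g. "casa", "libro"
            }
            stream.end();
        }
    }
}
```

From the REST side, the same stemmer is selected with a `stemmer` token filter whose `language` is set to `spanish_plural`, as documented in the asciidoc change below.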
--- docs/changelog/106952.yaml | 5 +++++ .../analysis/tokenfilters/stemmer-tokenfilter.asciidoc | 1 + .../analysis/common/StemmerTokenFilterFactory.java | 3 +++ 3 files changed, 9 insertions(+) create mode 100644 docs/changelog/106952.yaml diff --git a/docs/changelog/106952.yaml b/docs/changelog/106952.yaml new file mode 100644 index 0000000000000..1b45bf6ca28a2 --- /dev/null +++ b/docs/changelog/106952.yaml @@ -0,0 +1,5 @@ +pr: 106952 +summary: Add Lucene spanish plural stemmer +area: Search +type: enhancement +issues: [] diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index b8d883b057823..42ac594fca3bf 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -236,6 +236,7 @@ https://snowballstem.org/algorithms/serbian/stemmer.html[*`serbian`*] Spanish:: https://www.ercim.eu/publication/ws-proceedings/CLEF2/savoy.pdf[*`light_spanish`*], https://snowballstem.org/algorithms/spanish/stemmer.html[`spanish`] +https://www.wikilengua.org/index.php/Plural_(formaci%C3%B3n)[`spanish_plural`] Swedish:: https://snowballstem.org/algorithms/swedish/stemmer.html[*`swedish`*], diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 7385987567fb0..69b0921a4144f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; import org.apache.lucene.analysis.es.SpanishLightStemFilter; +import org.apache.lucene.analysis.es.SpanishPluralStemFilter; import org.apache.lucene.analysis.fa.PersianStemFilter; import org.apache.lucene.analysis.fi.FinnishLightStemFilter; import org.apache.lucene.analysis.fr.FrenchLightStemFilter; @@ -246,6 +247,8 @@ public TokenStream create(TokenStream tokenStream) { return new SnowballFilter(tokenStream, new SpanishStemmer()); } else if ("light_spanish".equalsIgnoreCase(language) || "lightSpanish".equalsIgnoreCase(language)) { return new SpanishLightStemFilter(tokenStream); + } else if ("spanish_plural".equalsIgnoreCase(language)) { + return new SpanishPluralStemFilter(tokenStream); // Sorani Kurdish stemmer } else if ("sorani".equalsIgnoreCase(language)) { From 1e253a04fcfecac2242e6e570531f287d967b813 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Tue, 2 Apr 2024 13:39:01 -0700 Subject: [PATCH 068/264] Propagate ?master_timeout query parameter from CCR apis to downstreams (#105168) * Propagate ?master_timeout query parameter from ccr put follow api * Update docs/changelog/105168.yaml * fix silly mistake * spotless * fix 2 * Add ?master_timeout query param to apis which use it * Update rest api specs + yaml tests * Add master_timeout (+timeout) to remaining api endpoints * Update 105168.yaml: correct message * Randomly enable ?master_timeout param in ccr IT tests * Add timeout param to RestCcrStatsAction * Propagate master_timeout param between put_follow -> resume_follow calls * Propagate master_timeout down to persistent task layer * Add transport version for ccr stats request object change * Add BwC test for CcrStatsAction.Request obj
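The per-endpoint change is mechanical: each REST handler reads `?master_timeout` off the `RestRequest` and sets it on the outgoing request, and the transport actions then forward that value to the downstream calls (resume-follow, the persistent-task layer, unbatched cluster-state updates) instead of passing `null`. Below is a minimal sketch of the handler-side shape, using the pause-follow endpoint; the class name is illustrative, while the API calls mirror the hunks that follow.

```java
import java.util.List;

import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;

import static org.elasticsearch.rest.RestRequest.Method.POST;

public class ExampleRestPauseFollowAction extends BaseRestHandler {

    @Override
    public List<Route> routes() {
        return List.of(new Route(POST, "/{index}/_ccr/pause_follow"));
    }

    @Override
    public String getName() {
        return "example_ccr_pause_follow_action";
    }

    @Override
    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
        PauseFollowAction.Request request = new PauseFollowAction.Request(restRequest.param("index"));
        // Carry ?master_timeout onto the master-node request; when the parameter is absent,
        // paramAsTime falls back to the request's existing default (30s).
        request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout()));
        return channel -> client.execute(PauseFollowAction.INSTANCE, request, new RestToXContentListener<>(channel));
    }
}
```

The stats endpoints additionally read `?timeout` and hand it to the shard-level stats fan-out, as shown in `RestCcrStatsAction` and `TransportCcrStatsAction` below.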
--- docs/changelog/105168.yaml | 5 ++ .../delete-auto-follow-pattern.asciidoc | 7 +++ .../get-auto-follow-pattern.asciidoc | 8 +++ .../pause-auto-follow-pattern.asciidoc | 7 +++ .../put-auto-follow-pattern.asciidoc | 8 +++ .../resume-auto-follow-pattern.asciidoc | 7 +++ .../ccr/apis/follow/get-follow-info.asciidoc | 8 +++ .../ccr/apis/follow/get-follow-stats.asciidoc | 6 ++ .../apis/follow/post-forget-follower.asciidoc | 6 ++ .../apis/follow/post-pause-follow.asciidoc | 8 +++ .../apis/follow/post-resume-follow.asciidoc | 8 +++ .../ccr/apis/follow/post-unfollow.asciidoc | 12 +++- .../ccr/apis/follow/put-follow.asciidoc | 5 ++ .../reference/ccr/apis/get-ccr-stats.asciidoc | 11 ++++ .../api/ccr.delete_auto_follow_pattern.json | 6 ++ .../rest-api-spec/api/ccr.follow.json | 4 ++ .../rest-api-spec/api/ccr.follow_info.json | 6 ++ .../rest-api-spec/api/ccr.follow_stats.json | 6 ++ .../api/ccr.forget_follower.json | 6 ++ .../api/ccr.get_auto_follow_pattern.json | 6 ++ .../api/ccr.pause_auto_follow_pattern.json | 6 ++ .../rest-api-spec/api/ccr.pause_follow.json | 6 ++ .../api/ccr.put_auto_follow_pattern.json | 6 ++ .../api/ccr.resume_auto_follow_pattern.json | 6 ++ .../rest-api-spec/api/ccr.resume_follow.json | 6 ++ .../rest-api-spec/api/ccr.stats.json | 10 ++++ .../rest-api-spec/api/ccr.unfollow.json | 6 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../rest-api-spec/test/ccr/auto_follow.yml | 3 + .../test/ccr/follow_and_unfollow.yml | 1 + .../rest-api-spec/test/ccr/follow_info.yml | 1 + .../elasticsearch/xpack/ccr/AutoFollowIT.java | 18 ++++++ .../ccr/action/TransportCcrStatsAction.java | 3 + .../action/TransportPauseFollowAction.java | 2 +- .../ccr/action/TransportPutFollowAction.java | 1 + .../action/TransportResumeFollowAction.java | 2 +- .../ccr/action/TransportUnfollowAction.java | 2 +- .../xpack/ccr/rest/RestCcrStatsAction.java | 4 ++ .../RestDeleteAutoFollowPatternAction.java | 1 + .../xpack/ccr/rest/RestFollowInfoAction.java | 1 + .../xpack/ccr/rest/RestFollowStatsAction.java | 3 + .../ccr/rest/RestForgetFollowerAction.java | 6 +- .../rest/RestGetAutoFollowPatternAction.java | 1 + .../RestPauseAutoFollowPatternAction.java | 1 + .../xpack/ccr/rest/RestPauseFollowAction.java | 1 + .../rest/RestPutAutoFollowPatternAction.java | 4 +- .../xpack/ccr/rest/RestPutFollowAction.java | 7 ++- .../RestResumeAutoFollowPatternAction.java | 1 + .../ccr/rest/RestResumeFollowAction.java | 8 ++- .../xpack/ccr/rest/RestUnfollowAction.java | 1 + .../elasticsearch/xpack/CcrIntegTestCase.java | 6 ++ .../xpack/CcrSingleNodeTestCase.java | 6 ++ .../action/PutFollowActionRequestTests.java | 5 +- .../xpack/core/ccr/action/CcrStatsAction.java | 44 ++++++++++++++ .../core/ccr/action/PutFollowAction.java | 5 +- .../core/ccr/action/CcrStatsActionTests.java | 60 +++++++++++++++++++ 56 files changed, 368 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/105168.yaml create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java diff --git a/docs/changelog/105168.yaml b/docs/changelog/105168.yaml new file mode 100644 index 0000000000000..0f3792b832f55 --- /dev/null +++ b/docs/changelog/105168.yaml @@ -0,0 +1,5 @@ +pr: 105168 +summary: Add ?master_timeout query parameter to ccr apis +area: CCR +type: bug +issues: [] diff --git a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc index c53ad7b268027..dffb20715a676 100644 --- a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc @@ -54,6 +54,13 @@ This API deletes a configured collection of ``:: (Required, string) Specifies the auto-follow pattern collection to delete. +[[ccr-delete-auto-follow-pattern-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. [[ccr-delete-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc index a3169adad1c2a..bcb0406fd43ba 100644 --- a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc @@ -72,6 +72,14 @@ This API will return the specified auto-follow pattern collection. to retrieve. If you do not specify a name, the API returns information for all collections. +[[ccr-get-auto-follow-pattern-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [[ccr-get-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc index 875f6626e618a..0ca8a8de6e572 100644 --- a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc @@ -40,6 +40,13 @@ meantime. ``:: (Required, string) Name of the auto-follow pattern to pause. +[[ccr-pause-auto-follow-pattern-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. [[ccr-pause-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc index e6509e024de84..f38454a1ad024 100644 --- a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc @@ -71,6 +71,14 @@ the new patterns. ``:: (Required, string) The name of the collection of auto-follow patterns. +[[ccr-put-auto-follow-pattern-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`.
+ [[ccr-put-auto-follow-pattern-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc index c28fddb766bf2..431137a6a9c18 100644 --- a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc @@ -35,6 +35,13 @@ have been deleted or closed in the meantime. ``:: (Required, string) Specifies the name of the auto-follow pattern to resume. +[[ccr-resume-auto-follow-pattern-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. [[ccr-resume-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc index c4562b2000dbc..26bb9ac50d2e3 100644 --- a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc @@ -49,6 +49,14 @@ replication options and whether the follower indices are active or paused. ``:: (Required, string) A comma-delimited list of follower index patterns. +[[ccr-get-follow-info-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [role="child_attributes"] [[ccr-get-follow-info-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc index c910b0431a6ea..72224cc7f51f4 100644 --- a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc @@ -56,6 +56,12 @@ following tasks associated with each shard for the specified indices. ``:: (Required, string) A comma-delimited list of index patterns. +[[ccr-get-follow-stats-query-params]] +==== {api-query-parms-title} + +`timeout`:: +(Optional, time) Controls the amount of time to wait for results. Defaults to unlimited. + [role="child_attributes"] [[ccr-get-follow-stats-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc b/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc index 7df4132da91e1..ea7e8640056bf 100644 --- a/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc +++ b/docs/reference/ccr/apis/follow/post-forget-follower.asciidoc @@ -100,6 +100,12 @@ the <> is invoked. ``:: (Required, string) The name of the leader index. +[[ccr-post-forget-follower-query-params]] +==== {api-query-parms-title} + +`timeout`:: +(Optional, time) Controls the amount of time to wait for results. Defaults to unlimited. + [[ccr-post-forget-follower-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc index 0ccdd4e1b30f7..b0a6752358cf7 100644 --- a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc @@ -53,6 +53,14 @@ following task. 
``:: (Required, string) The name of the follower index. +[[ccr-post-pause-follow-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [[ccr-post-pause-follow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc index 3d72fa0bff3e8..1f1996837cd8e 100644 --- a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc @@ -66,6 +66,14 @@ returns, the follower index will resume fetching operations from the leader inde ``:: (Required, string) The name of the follower index. +[[ccr-post-resume-follow-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [[ccr-post-resume-follow-request-body]] ==== {api-request-body-title} include::../follow-request-body.asciidoc[tag=ccr-resume-follow-request-body] diff --git a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc index 7bf7dba5fa8de..0851340a4da08 100644 --- a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc @@ -38,9 +38,9 @@ POST //_ccr/unfollow [[ccr-post-unfollow-prereqs]] ==== {api-prereq-title} -* If the {es} {security-features} are enabled, you must have `manage_follow_index` +* If the {es} {security-features} are enabled, you must have `manage_follow_index` index privileges for the follower index. For more information, see -<>. +<>. [[ccr-post-unfollow-desc]] ==== {api-description-title} @@ -60,6 +60,14 @@ irreversible operation. ``:: (Required, string) The name of the follower index. +[[ccr-post-unfollow-query-params]] +==== {api-query-parms-title} + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [[ccr-post-unfollow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/put-follow.asciidoc b/docs/reference/ccr/apis/follow/put-follow.asciidoc index 93e8a710751a8..a7fdfc5ae96c9 100644 --- a/docs/reference/ccr/apis/follow/put-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/put-follow.asciidoc @@ -65,6 +65,11 @@ referenced leader index. When this API returns, the follower index exists, and follower shard requires transferring all the remote Lucene segment files to the follower index. +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [[ccr-put-follow-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 02f5cf886049d..69836aec910df 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -50,6 +50,17 @@ This API gets {ccr} stats. 
This API will return all stats related to {ccr}. In particular, this API returns stats about auto-following, and returns the same shard-level stats as in the <>. +[[ccr-get-stats-query-params]] +==== {api-query-parms-title} + +`timeout`:: +(Optional, time) Controls the amount of time to wait for results. Defaults to unlimited. + +`master_timeout`:: +(Optional, <>) Specifies the period of time to wait for +a connection to the master node. If no response is received before the timeout +expires, the request fails and returns an error. Defaults to `30s`. + [role="child_attributes"] [[ccr-get-stats-response-body]] ==== {api-response-body-title} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json index 52f56ca321369..c316e9fcbd36c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json index 6905a53ead480..642477cb45e48 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow.json @@ -31,6 +31,10 @@ "type":"string", "description":"Sets the number of shard copies that must be active before returning. Defaults to 0. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)", "default":"0" + }, + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json index 17ecd57155a94..a2af3f9be77d1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_info.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json index 54de8c4df64f4..75d0bafb5083e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.follow_stats.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json index 8106a74b9d003..61a132a1f4ac4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.forget_follower.json @@ -26,6 +26,12 @@ } ] }, + "params":{ + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + }, "body":{ "description":"the 
name and UUID of the follower index, the name of the cluster containing the follower index, and the alias from the perspective of that cluster for the remote cluster containing the leader index", "required":true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json index 8073fd72bba33..663f37fca3a9a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json @@ -30,6 +30,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json index 9375673489e1c..c766304fc0512 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_auto_follow_pattern.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json index a4923df29c565..1c1fcf71d08ff 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.pause_follow.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json index 6331b4ff06106..6f5f03bc9a3d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json @@ -26,6 +26,12 @@ } ] }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } + }, "body":{ "description":"The specification of the auto follow pattern", "required":true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json index b679155b28a09..a4ea108426ed8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_auto_follow_pattern.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json index d6addce133a69..854ff83291a8b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.resume_follow.json @@ -26,6 +26,12 @@ } ] }, + "params":{ + "master_timeout":{ + 
"type":"time", + "description":"Explicit operation timeout for connection to master node" + } + }, "body":{ "description":"The name of the leader index and other optional ccr related parameters", "required":false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json index ac47e9c3d0b7e..5ebc781109abd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.stats.json @@ -18,6 +18,16 @@ ] } ] + }, + "params":{ + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + }, + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json index 7a49b4a10987e..096d39fa16480 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ccr.unfollow.json @@ -24,6 +24,12 @@ } } ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + } } } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index b0649c9429884..e072bfc799e5d 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -160,6 +160,7 @@ static TransportVersion def(int id) { public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); + public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/auto_follow.yml index 5003ebfdc4d71..66f9d5df3d9d0 100644 --- a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/auto_follow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -201,6 +201,7 @@ - do: ccr.put_auto_follow_pattern: name: pattern_test + master_timeout: 10s body: remote_cluster: local leader_index_patterns: ['logs-*'] @@ -224,6 +225,7 @@ - do: ccr.pause_auto_follow_pattern: name: pattern_test + master_timeout: 10s - is_true: acknowledged - do: @@ -243,6 +245,7 @@ - do: ccr.resume_auto_follow_pattern: name: pattern_test + master_timeout: 10s - is_true: acknowledged - do: diff --git a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml index 78a0e67f13419..ac3ed826973b0 100644 --- a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml @@ -40,6 +40,7 @@ ccr.follow: index: bar wait_for_active_shards: 1 + master_timeout: 10s body: remote_cluster: local leader_index: foo diff --git a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_info.yml b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_info.yml index f3224482c274e..604cbd83a70d1 100644 --- a/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_info.yml +++ b/x-pack/plugin/ccr/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/ccr/follow_info.yml @@ -34,6 +34,7 @@ ccr.follow: index: bar wait_for_active_shards: 1 + master_timeout: 10s body: remote_cluster: local leader_index: foo diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index df8f19ab3888c..dc899989eb042 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -734,17 +734,26 @@ private void putAutoFollowPatterns(String name, String[] patterns, List request.setLeaderIndexExclusionPatterns(exclusionPatterns); // Need to set this, because following an index in the same cluster request.setFollowIndexNamePattern("copy-{{leader_index}}"); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } assertTrue(followerClient().execute(PutAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); } private void deleteAutoFollowPattern(final String name) { DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request(name); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } assertTrue(followerClient().execute(DeleteAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); } private AutoFollowStats getAutoFollowStats() { CcrStatsAction.Request request = new CcrStatsAction.Request(); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } return 
followerClient().execute(CcrStatsAction.INSTANCE, request).actionGet().getAutoFollowStats(); } @@ -756,17 +765,26 @@ private void createLeaderIndex(String index, Settings settings) { private void pauseAutoFollowPattern(final String name) { ActivateAutoFollowPatternAction.Request request = new ActivateAutoFollowPatternAction.Request(name, false); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } assertAcked(followerClient().execute(ActivateAutoFollowPatternAction.INSTANCE, request).actionGet()); } private void resumeAutoFollowPattern(final String name) { ActivateAutoFollowPatternAction.Request request = new ActivateAutoFollowPatternAction.Request(name, true); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } assertAcked(followerClient().execute(ActivateAutoFollowPatternAction.INSTANCE, request).actionGet()); } private AutoFollowMetadata.AutoFollowPattern getAutoFollowPattern(final String name) { GetAutoFollowPatternAction.Request request = new GetAutoFollowPatternAction.Request(); request.setName(name); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } GetAutoFollowPatternAction.Response response = followerClient().execute(GetAutoFollowPatternAction.INSTANCE, request).actionGet(); assertTrue(response.getAutoFollowPatterns().containsKey(name)); return response.getAutoFollowPatterns().get(name); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java index 6b324ae901370..b089c1a4cb84a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java @@ -80,6 +80,9 @@ protected void masterOperation( ) throws Exception { FollowStatsAction.StatsRequest statsRequest = new FollowStatsAction.StatsRequest(); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); + if (request.getTimeout() != null) { + statsRequest.setTimeout(request.getTimeout()); + } client.execute(FollowStatsAction.INSTANCE, statsRequest, listener.delegateFailureAndWrap((l, statsResponse) -> { AutoFollowStats stats = autoFollowCoordinator.getStats(); l.onResponse(new CcrStatsAction.Response(stats, statsResponse)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java index 99c532f3b077f..8207228f0d21c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java @@ -98,7 +98,7 @@ protected void masterOperation( final ResponseHandler responseHandler = new ResponseHandler(shardFollowTaskIds.size(), listener); for (String taskId : shardFollowTaskIds) { final int taskSlot = i++; - persistentTasksService.sendRemoveRequest(taskId, null, responseHandler.getActionListener(taskSlot)); + persistentTasksService.sendRemoveRequest(taskId, request.masterNodeTimeout(), responseHandler.getActionListener(taskSlot)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 0eff80709ccd4..446e9abcd3e26 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -297,6 +297,7 @@ private void initiateFollowing( ResumeFollowAction.Request resumeFollowRequest = new ResumeFollowAction.Request(); resumeFollowRequest.setFollowerIndex(request.getFollowerIndex()); resumeFollowRequest.setParameters(new FollowParameters(parameters)); + resumeFollowRequest.masterNodeTimeout(request.masterNodeTimeout()); clientWithHeaders.execute( ResumeFollowAction.INSTANCE, resumeFollowRequest, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 848060f102222..8fc901fee4010 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -211,7 +211,7 @@ void start( taskId, ShardFollowTask.NAME, shardFollowTask, - null, + request.masterNodeTimeout(), handler.getActionListener(shardId) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index edcfc0d8d77d6..5e7204c7fb972 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -92,7 +92,7 @@ protected void masterOperation( final ClusterState state, final ActionListener listener ) { - submitUnbatchedTask("unfollow_action", new ClusterStateUpdateTask() { + submitUnbatchedTask("unfollow_action", new ClusterStateUpdateTask(request.masterNodeTimeout()) { @Override public ClusterState execute(final ClusterState current) { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java index e394f708b07f5..139c6e04c32d4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java @@ -38,6 +38,10 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final CcrStatsAction.Request request = new CcrStatsAction.Request(); + if (restRequest.hasParam("timeout")) { + request.setTimeout(restRequest.paramAsTime("timeout", null)); + } + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute( CcrStatsAction.INSTANCE, request, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java index a931a498b1bba..9e94e27f55811 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(restRequest.param("name")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java index 4b3ac9f605d3f..86be9487417f8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java @@ -33,6 +33,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final FollowInfoAction.Request request = new FollowInfoAction.Request(); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.setFollowerIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); return channel -> client.execute(FollowInfoAction.INSTANCE, request, new RestRefCountedChunkedToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java index 7592db0480b92..e7e4d34a82425 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java @@ -40,6 +40,9 @@ public String getName() { protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); request.setIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); + if (restRequest.hasParam("timeout")) { + request.setTimeout(restRequest.param("timeout")); + } return channel -> client.execute( FollowStatsAction.INSTANCE, request, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java index caa1b08203fbc..fc99b42680825 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java @@ -40,7 +40,11 @@ protected RestChannelConsumer prepareRequest(final RestRequest restRequest, fina private static Request createRequest(final RestRequest restRequest, final String leaderIndex) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - return Request.fromXContent(parser, leaderIndex); + Request request = Request.fromXContent(parser, leaderIndex); + if (restRequest.hasParam("timeout")) { + request.timeout(restRequest.paramAsTime("timeout", null)); + } + return request; } } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java index ba59f00e20537..519888a93afdf 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java @@ -33,6 +33,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(); request.setName(restRequest.param("name")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseAutoFollowPatternAction.java index 152a6d091d85f..6846d96a2f015 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { Request request = new Request(restRequest.param("name"), false); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java index 12a208e82a842..b08fbb039cbc3 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java @@ -32,6 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(restRequest.param("index")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java index 9d536966a3ef8..dd432411014ab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java @@ -39,7 +39,9 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient private static Request createRequest(RestRequest restRequest) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - return Request.fromXContent(parser, restRequest.param("name")); + Request request = Request.fromXContent(parser, restRequest.param("name")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + return request; } } } diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java index c2bd39286711a..5ceef134090a4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java @@ -40,8 +40,11 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient private static Request createRequest(RestRequest restRequest) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - ActiveShardCount waitForActiveShards = ActiveShardCount.parseString(restRequest.param("wait_for_active_shards")); - return Request.fromXContent(parser, restRequest.param("index"), waitForActiveShards); + final Request request = Request.fromXContent(parser); + request.waitForActiveShards(ActiveShardCount.parseString(restRequest.param("wait_for_active_shards"))); + request.setFollowerIndex(restRequest.param("index")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + return request; } } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeAutoFollowPatternAction.java index 9602ca339e295..a6ccf9fe1fa0b 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { Request request = new Request(restRequest.param("name"), true); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java index 66659d19c74d9..d3994c487e456 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java @@ -38,14 +38,16 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } static Request createRequest(RestRequest restRequest) throws IOException { + Request request; if (restRequest.hasContentOrSourceParam()) { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - return Request.fromXContent(parser, restRequest.param("index")); + request = Request.fromXContent(parser, restRequest.param("index")); } } else { - Request request = new Request(); + request = new Request(); request.setFollowerIndex(restRequest.param("index")); - return request; } + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + return request; } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java index 2fb0303f69cf3..5936fc05cb449 100644 --- 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java @@ -33,6 +33,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { UnfollowAction.Request request = new UnfollowAction.Request(restRequest.param("index")); + request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 4ce64bc41d6a1..4f453a2ad66f4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -594,6 +594,9 @@ public static PutFollowAction.Request putFollow(String leaderIndex, String follo request.getParameters().setMaxReadRequestSize(ByteSizeValue.ofBytes(between(1, 32 * 1024 * 1024))); request.getParameters().setMaxReadRequestOperationCount(between(1, 10000)); request.waitForActiveShards(waitForActiveShards); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } return request; } @@ -602,6 +605,9 @@ public static ResumeFollowAction.Request resumeFollow(String followerIndex) { request.setFollowerIndex(followerIndex); request.getParameters().setMaxRetryDelay(TimeValue.timeValueMillis(10)); request.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(10)); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java index 9b6d81a206f71..da3f29fcef8d3 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java @@ -103,6 +103,9 @@ protected ResumeFollowAction.Request getResumeFollowRequest(String followerIndex request.setFollowerIndex(followerIndex); request.getParameters().setMaxRetryDelay(TimeValue.timeValueMillis(1)); request.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(1)); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } return request; } @@ -114,6 +117,9 @@ protected PutFollowAction.Request getPutFollowRequest(String leaderIndex, String request.getParameters().setMaxRetryDelay(TimeValue.timeValueMillis(1)); request.getParameters().setReadPollTimeout(TimeValue.timeValueMillis(1)); request.waitForActiveShards(ActiveShardCount.ONE); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("10s", "20s", "30s")); + } return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java index 27aaf1dea7db6..171727c3e0bc8 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java @@ -61,7 +61,10 @@ protected PutFollowAction.Request 
createXContextTestInstance(XContentType xConte @Override protected PutFollowAction.Request doParseInstance(XContentParser parser) throws IOException { - return PutFollowAction.Request.fromXContent(parser, "followerIndex", ActiveShardCount.DEFAULT); + PutFollowAction.Request request = PutFollowAction.Request.fromXContent(parser); + request.waitForActiveShards(ActiveShardCount.DEFAULT); + request.setFollowerIndex("followerIndex"); + return request; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index b6c078134634b..a9954005b4486 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ccr.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -16,6 +17,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; @@ -34,8 +36,13 @@ private CcrStatsAction() { public static class Request extends MasterNodeRequest<Request> { + private TimeValue timeout; + public Request(StreamInput in) throws IOException { super(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.CCR_STATS_API_TIMEOUT_PARAM)) { + timeout = in.readOptionalTimeValue(); + } } public Request() {} @@ -48,6 +55,43 @@ public ActionRequestValidationException validate() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.CCR_STATS_API_TIMEOUT_PARAM)) { + out.writeOptionalTimeValue(timeout); + } + } + + public TimeValue getTimeout() { + return this.timeout; + } + + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + public void setTimeout(String timeout) { + this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Request that = (Request) o; + return Objects.equals(this.timeout, that.timeout) && Objects.equals(this.masterNodeTimeout, that.masterNodeTimeout); + } + + @Override + public int hashCode() { + return Objects.hash(this.timeout, this.masterNodeTimeout); + } + + @Override + public String toString() { + return "CcrStatsAction.Request[timeout=" + timeout + ", masterNodeTimeout=" + masterNodeTimeout + "]"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index c2e1048541a47..6570fb66a2755 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -64,13 +64,10 @@ public static final class Request extends
AcknowledgedRequest implement FollowParameters.initParser(PARSER); } - public static Request fromXContent(final XContentParser parser, final String followerIndex, ActiveShardCount waitForActiveShards) - throws IOException { + public static Request fromXContent(final XContentParser parser) throws IOException { PutFollowParameters parameters = PARSER.parse(parser, null); Request request = new Request(); - request.waitForActiveShards(waitForActiveShards); - request.setFollowerIndex(followerIndex); request.setRemoteCluster(parameters.remoteCluster); request.setLeaderIndex(parameters.leaderIndex); request.setDataStreamName(parameters.dataStreamName); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java new file mode 100644 index 0000000000000..e40954b361ef6 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsActionTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ccr.action; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; + +import java.io.IOException; + +public class CcrStatsActionTests extends AbstractWireSerializingTestCase<CcrStatsAction.Request> { + + @Override + protected Writeable.Reader<CcrStatsAction.Request> instanceReader() { + return CcrStatsAction.Request::new; + } + + @Override + protected CcrStatsAction.Request createTestInstance() { + var request = new CcrStatsAction.Request(); + request.setTimeout(randomFrom("1s", "5s", "10s", "15s")); + request.masterNodeTimeout(randomFrom("1s", "5s", "10s", "15s")); + return request; + } + + @Override + protected CcrStatsAction.Request mutateInstance(CcrStatsAction.Request instance) throws IOException { + return switch (randomInt(1)) { + case 0 -> { + var mutatedInstance = new CcrStatsAction.Request(); + mutatedInstance.setTimeout(instance.getTimeout()); + mutatedInstance.masterNodeTimeout(randomFrom("20s", "25s", "30s")); + yield mutatedInstance; + } + case 1 -> { + var mutatedInstance = new CcrStatsAction.Request(); + mutatedInstance.setTimeout(randomFrom("20s", "25s", "30s")); + mutatedInstance.masterNodeTimeout(instance.masterNodeTimeout()); + yield mutatedInstance; + } + default -> throw new RuntimeException("Cannot happen"); + }; + } + + public void testSerializationBwc() throws IOException { + // In previous versions `timeout` is not set + var request = new CcrStatsAction.Request(); + if (randomBoolean()) { + request.masterNodeTimeout(randomFrom("20s", "25s", "30s")); + } + assertSerialization(request, TransportVersionUtils.getPreviousVersion(TransportVersions.CCR_STATS_API_TIMEOUT_PARAM)); + assertSerialization(request, TransportVersions.MINIMUM_CCS_VERSION); + } +} From e39fd58c5d7fc7d96514f985e4efdff8a478023b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 2 Apr 2024 13:40:33 -0700 Subject: [PATCH 069/264] Fix jvm ergonomics tests (#106969) closes #106554 --- .../server/cli/JvmErgonomicsTests.java | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git
a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java index 0485697eb128c..a8a2ddbe14aab 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.server.cli; -import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressFileSystems; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.settings.Settings; @@ -18,11 +17,13 @@ import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.IntStream; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -41,7 +42,6 @@ @WithoutSecurityManager @SuppressFileSystems("*") -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106554") public class JvmErgonomicsTests extends ESTestCase { public void testExtractValidHeapSizeUsingXmx() throws Exception { @@ -193,7 +193,10 @@ public void testConcGCThreadsNotSetBasedOnProcessors() throws Exception { Settings.Builder nodeSettingsBuilder = Settings.builder() .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()); if (randomBoolean()) { - nodeSettingsBuilder.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), randomBoolean() ? between(1, 3) : between(6, 100)); + int maxProcessors = Runtime.getRuntime().availableProcessors(); + List<Integer> possibleProcessors = new ArrayList<>(); + IntStream.range(1, maxProcessors + 1).filter(i -> i < 4 || i > 5).forEach(possibleProcessors::add); + nodeSettingsBuilder.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), randomFrom(possibleProcessors)); } assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); } @@ -201,10 +204,10 @@ public void testConcGCThreadsNotSetBasedOnRoles() throws Exception { Settings.Builder nodeSettingsBuilder = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5)); if (randomBoolean()) { - nodeSettingsBuilder.put( - NodeRoleSettings.NODE_ROLES_SETTING.getKey(), - randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() - ); + List<DiscoveryNodeRole> possibleRoles = new ArrayList<>(DiscoveryNodeRole.roles()); + possibleRoles.remove(DiscoveryNodeRole.SEARCH_ROLE); + possibleRoles.remove(DiscoveryNodeRole.VOTING_ONLY_NODE_ROLE); + nodeSettingsBuilder.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), randomFrom(possibleRoles).roleName()); } assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); @@ -228,14 +231,17 @@ public void testMinimumNewSizeNotSetBasedOnHeap() throws Exception { } public void testMinimumNewSizeNotSetBasedOnRoles() throws Exception { - Settings nodeSettings = randomBoolean() ?
Settings.EMPTY - : Settings.builder() - .put( - NodeRoleSettings.NODE_ROLES_SETTING.getKey(), - randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() - ) + Settings nodeSettings; + if (randomBoolean()) { + nodeSettings = Settings.EMPTY; + } else { + List<DiscoveryNodeRole> possibleRoles = new ArrayList<>(DiscoveryNodeRole.roles()); + possibleRoles.remove(DiscoveryNodeRole.SEARCH_ROLE); + possibleRoles.remove(DiscoveryNodeRole.VOTING_ONLY_NODE_ROLE); + nodeSettings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), randomFrom(possibleRoles).roleName()) + .build(); + } List<String> chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings); assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions")))); assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent=")))); From e6ccaf4aaba71a63611926ab12f9efb90b7fff35 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Apr 2024 20:42:14 -0400 Subject: [PATCH 070/264] ESQL: Split the tests for META FUNCTIONS (#106954) This splits the tests for META FUNCTIONS into a few tests that fit better on the screen. There's an interesting bug in the CSV parser where, if you end the last line with an empty string, it'll consider the value missing and we read it back as `null`. I bumped into that and it caused me some trouble. I had to work around it here. Thus the assertion changes. Basically `[]` will force us to parse a list. So we can use `[""]` - a list of just the empty string - to force the testing framework to spit out an empty string. We only need this because the empty string is the last item in the list. --- .../elasticsearch/xpack/esql/CsvAssert.java | 7 +- .../xpack/esql/CsvTestUtils.java | 30 +- .../src/main/resources/docs.csv-spec | 4 +- .../src/main/resources/meta.csv-spec | 432 +++++++++++++----- 4 files changed, 345 insertions(+), 128 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 0b7e39608b9aa..112a5777073ee 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -79,12 +79,7 @@ private static void assertMetadata( var expectedTypes = expected.columnTypes(); assertThat( - format( - null, - "Different number of columns returned; expected [{}] but actual was [{}]", - expectedNames.size(), - actualNames.size() - ), + format(null, "Different number of columns returned; expected {} but actual was {}", expectedNames, actualNames), actualNames, Matchers.hasSize(expectedNames.size()) ); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index b495a6f1a6479..cb1a878bf333d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -353,27 +353,33 @@ public static ExpectedResults loadCsvSpecValues(String csv) { List<Object> rowValues = new ArrayList<>(row.size()); for (int i = 0; i < row.size(); i++) { String value = row.get(i); - if (value == null || value.trim().equalsIgnoreCase(NULL_VALUE)) { + if (value == null) { rowValues.add(null); continue; }
value = value.trim(); - if (value.startsWith("[") ^ value.endsWith("]")) { - throw new IllegalArgumentException("Incomplete multi-value (opening and closing square brackets) found " + value); + value = value.trim(); + if (value.equalsIgnoreCase(NULL_VALUE)) { + rowValues.add(null); + continue; } - if (value.contains(",") && value.startsWith("[")) { + if (value.startsWith("[")) { + if (false == value.endsWith("]")) { + throw new IllegalArgumentException( + "Incomplete multi-value (opening and closing square brackets) found " + value + " on row " + values.size() + ); + } // split on commas but ignoring escaped commas String[] multiValues = value.substring(1, value.length() - 1).split(COMMA_ESCAPING_REGEX); - if (multiValues.length > 0) { - List<Object> listOfMvValues = new ArrayList<>(); - for (String mvValue : multiValues) { - listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim().replace(ESCAPED_COMMA_SEQUENCE, ","))); - } - rowValues.add(listOfMvValues); - } else { - rowValues.add(columnTypes.get(i).convert(value.replace(ESCAPED_COMMA_SEQUENCE, ","))); + if (multiValues.length == 1) { + rowValues.add(columnTypes.get(i).convert(multiValues[0].replace(ESCAPED_COMMA_SEQUENCE, ","))); + continue; + } + List<Object> listOfMvValues = new ArrayList<>(); + for (String mvValue : multiValues) { + listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim().replace(ESCAPED_COMMA_SEQUENCE, ","))); } + rowValues.add(listOfMvValues); } else { // The value considered here is the one where any potential escaped comma is kept as is (with the escape char) // TODO if we'd want escaped commas outside multi-values fields, we'd have to adjust this value here as well diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 71bb1e9f28723..d34620a9e118d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -486,8 +486,8 @@ ROW message="[1998-08-10T17:15:42] [WARN]" ; // tag::dissectEmptyRightPaddingModifier-result[] -message:keyword | ts:keyword | level:keyword -[1998-08-10T17:15:42] [WARN]|1998-08-10T17:15:42 |WARN + message:keyword | ts:keyword |level:keyword +["[1998-08-10T17:15:42] [WARN]"]|1998-08-10T17:15:42 |WARN // end::dissectEmptyRightPaddingModifier-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 392d6f036111a..c72feaccfc622 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -1,110 +1,3 @@ -# TODO: switch this test to ``&format=csv&delimiter=|` output -metaFunctions#[skip:-8.13.99] -meta functions; - - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`."
|double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians." | false | false | false -asin |"double asin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians." | false | false | false -atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians." | false | false | false -atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate. If `null`\, the function returns `null`.", "x coordinate. If `null`\, the function returns `null`."] |double | "The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians." | [false, false] | false | false -auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false -avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true -case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false -ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." 
| [false, false] | true | false -coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false -concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false -cos |"double cos(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle." | false | false | false -cosh |"double cosh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle." | false | false | false -count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true -count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |"long date_extract(datePart:keyword|text, date:date)" |[datePart, date] |["keyword|text", date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |"keyword date_format(?dateFormat:keyword|text, date:date)" |[dateFormat, date] |["keyword|text", date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword|text, dateString:keyword|text)"|[datePattern, dateString]|["keyword|text", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |["keyword", date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false -e |double e() | null | null | null |double | "Euler’s number." | null | false | false -ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false -floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false -greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | false | true | false -least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | false | true | false -left |"keyword left(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["The string from which to return a substring.", "The number of characters to return."] |keyword | "Returns the substring that extracts 'length' chars from 'string' starting from the left." | [false, false] | false | false -length |"integer length(string:keyword|text)" |string |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false -log |"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a number to a base." | [true, false] | false | false -log10 |"double log10(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false -ltrim |"keyword|text ltrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false -max |"double|integer|long max(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true -median |"double|integer|long median(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." 
| false | false | true -median_absolute_deviation|"double|integer|long median_absolute_deviation(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true -min |"double|integer|long min(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true -mv_avg |"double mv_avg(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false -mv_concat |"keyword mv_concat(string:text|keyword, delim:text|keyword)" |[string, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false -mv_count |"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false -mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false -mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false -mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false -mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." 
| false | false | false -mv_median |"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false -mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false -mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[field, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." | [false, false, true] | false | false -mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false -mv_sum |"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false -mv_zip |"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" |[string1, string2, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false -now |date now() | null |null | null |date | "Returns current date and time." | null | false | false -percentile |"double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" |[number, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true -pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false -pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." 
| [false, false] | false | false -replace |"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" | [string, regex, newString] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false -right |"keyword right(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false -round |"double round(number:double, ?decimals:integer)" |[number, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false -rtrim |"keyword|text rtrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle." | false | false | false -sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle." | false | false | false -split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false -sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false -st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true -st_contains |"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry contains the second geometry." | [false, false] | false | false -st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." 
| [false, false] | false | false -st_within |"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry is within the second geometry." | [false, false] | false | false -st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false -st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false -starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false -substring |"keyword substring(string:keyword|text, start:integer, ?length:integer)" |[string, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false -sum |"long sum(number:double|integer|long)" |number |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle." | false | false | false -tanh |"double tanh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle." | false | false | false -tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false -to_bool |"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_boolean |"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_cartesianpoint |"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" |field |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false -to_cartesianshape |"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" |field |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false -to_datetime |"date to_datetime(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." 
|false |false | false -to_dbl |"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_degrees |"double to_degrees(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false -to_double |"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_dt |"date to_dt(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_geopoint |"geo_point to_geopoint(field:geo_point|keyword|text)" |field |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false -to_geoshape |"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" |field |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false -to_int |"integer to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_integer |"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_ip |"ip to_ip(field:ip|keyword|text)" |field |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false -to_long |"long to_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false -to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false -to_radians |"double to_radians(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false -to_str |"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_string |"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." 
|false |false | false -to_ul |"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_ulong |"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_unsigned_long |"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false -to_ver |"version to_ver(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -to_version |"version to_version(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -trim |"keyword|text trim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false -values |"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field |"boolean|date|double|integer|ip|keyword|long|text|version" | |"boolean|date|double|integer|ip|keyword|long|text|version" |"Collect values for a field." |false |false |true -; - - metaFunctionsSynopsis#[skip:-8.13.99] meta functions | keep synopsis; @@ -210,9 +103,332 @@ double tau() "boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" ; +metaFunctionsArgs#[skip:-8.13.99] + META functions +| EVAL name = SUBSTRING(name, 0, 14) +| KEEP name, argNames, argTypes, argDescriptions; + + name:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword +abs |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. +acos |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. +asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. +atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. +atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] +auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] +avg |number |"double|integer|long" |[""] +case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] +ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. 
+cidr_match |[ip, blockX] |[ip, "keyword|text"] |[, CIDR block to test the IP against.] +coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate +concat |[string1, string2] |["keyword|text", "keyword|text"] |[, ] +cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. +cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. +count |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |Column or literal for which to count the number of values. +count_distinct|[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version", integer] |[Column or literal for which to count the number of distinct values., ] +date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] |[A valid date unit, A string representing a start timestamp, A string representing an end timestamp] +date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era., Date expression] +date_format |[dateFormat, date] |["keyword|text", date] |[A valid date pattern, Date expression] +date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[A valid date pattern, A string representing a date] +date_trunc |[interval, date] |[keyword, date] |[Interval; expressed using the timespan literal syntax., Date expression] +e |null |null |null +ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] +floor |number |"double|integer|long|unsigned_long" |[""] +greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] 
+length |string |"keyword|text" |[""] +log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |[, ] +log10 |number |"double|integer|long|unsigned_long" |[""] +ltrim |string |"keyword|text" |[""] +max |number |"double|integer|long" |[""] +median |number |"double|integer|long" |[""] +median_absolut|number |"double|integer|long" |[""] +min |number |"double|integer|long" |[""] +mv_avg |number |"double|integer|long|unsigned_long" |[""] +mv_concat |[string, delim] |["text|keyword", "text|keyword"] |[values to join, delimiter] +mv_count |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[""] +mv_dedupe |field |"boolean|date|double|integer|ip|keyword|long|text|version" |[""] +mv_first |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[""] +mv_last |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[""] +mv_max |field |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |[""] +mv_median |number |"double|integer|long|unsigned_long" |[""] +mv_min |field |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |[""] +mv_slice |[field, start, end] |["boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version", integer, integer]|[A multivalued field, start index, end index (included)] +mv_sort |[field, order] |["boolean|date|double|integer|ip|keyword|long|text|version", keyword] |[A multivalued field, sort order] +mv_sum |number |"double|integer|long|unsigned_long" |[""] +mv_zip |[string1, string2, delim] |["keyword|text", "keyword|text", "keyword|text"] |[A multivalued field, A multivalued field, delimiter] +now |null |null |null +percentile |[number, percentile] |["double|integer|long", "double|integer|long"] |[, ] +pi |null |null |null +pow |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[, ] +replace |[string, regex, newString] |["keyword|text", "keyword|text", "keyword|text"] |[, , ] +right |[string, length] |["keyword|text", integer] |[, ] +round |[number, decimals] |[double, integer] |[The numeric value to round, The number of decimal places to round to. Defaults to 0.] +rtrim |string |"keyword|text" |[""] +sin |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. +sinh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. 
+split |[string, delim] |["keyword|text", "keyword|text"] |[, ] +sqrt |number |"double|integer|long|unsigned_long" |[""] +st_centroid |field |"geo_point|cartesian_point" |[""] +st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_x |point |"geo_point|cartesian_point" |[""] +st_y |point |"geo_point|cartesian_point" |[""] +starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[, ] +substring |[string, start, length] |["keyword|text", integer, integer] |[, , ] +sum |number |"double|integer|long" |[""] +tan |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. +tanh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. +tau |null |null |null +to_bool |field |"boolean|keyword|text|double|long|unsigned_long|integer" |[""] +to_boolean |field |"boolean|keyword|text|double|long|unsigned_long|integer" |[""] +to_cartesianpo|field |"cartesian_point|keyword|text" |[""] +to_cartesiansh|field |"cartesian_point|cartesian_shape|keyword|text" |[""] +to_datetime |field |"date|keyword|text|double|long|unsigned_long|integer" |[""] +to_dbl |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_degrees |number |"double|integer|long|unsigned_long" |[""] +to_double |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_dt |field |"date|keyword|text|double|long|unsigned_long|integer" |[""] +to_geopoint |field |"geo_point|keyword|text" |[""] +to_geoshape |field |"geo_point|geo_shape|keyword|text" |[""] +to_int |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_integer |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_ip |field |"ip|keyword|text" |[""] +to_long |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_lower |str |"keyword|text" |The input string +to_radians |number |"double|integer|long|unsigned_long" |[""] +to_str |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[""] +to_string |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[""] +to_ul |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_ulong |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_unsigned_lo|field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |[""] +to_upper |str |"keyword|text" |The input string +to_ver |field |"keyword|text|version" |[""] +to_version |field |"keyword|text|version" |[""] +trim |string |"keyword|text" |[""] +values |field |"boolean|date|double|integer|ip|keyword|long|text|version" |[""] +; + +metaFunctionsDescription#[skip:-8.13.99] + META functions +| EVAL name = 
SUBSTRING(name, 0, 14) +| KEEP name, description +; + + name:keyword | description:keyword +abs |Returns the absolute value. +acos |Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. +asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. +atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. +atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. +auto_bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. +avg |The average of a numeric field. +case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +ceil |Round a number up to the nearest integer. +cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. +coalesce |Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. +concat |Concatenates two or more strings. +cos |Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. +cosh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. +count |Returns the total number (count) of input values. +count_distinct|Returns the approximate number of distinct values. +date_diff |Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument +date_extract |Extracts parts of a date, like year, month, day, hour. +date_format |Returns a string representation of a date, in the provided format. +date_parse |Parses a string into a date value +date_trunc |Rounds down a date to the closest interval. +e |Euler’s number. +ends_with |Returns a boolean that indicates whether a keyword string ends with another string +floor |Round a number down to the nearest integer. +greatest |Returns the maximum value from many columns. +least |Returns the minimum value from many columns. +left |Returns the substring that extracts 'length' chars from 'string' starting from the left. +length |Returns the character length of a string. +log |Returns the logarithm of a number to a base. +log10 |Returns the log base 10. +ltrim |Removes leading whitespaces from a string. +max |The maximum value of a numeric field. +median |The value that is greater than half of all values and less than half of all values. +median_absolut|The median absolute deviation, a measure of variability. +min |The minimum value of a numeric field. +mv_avg |Converts a multivalued field into a single valued field containing the average of all of the values. +mv_concat |Reduce a multivalued string field to a single valued field by concatenating all values. +mv_count |Reduce a multivalued field to a single valued field containing the count of values. +mv_dedupe |Remove duplicate values from a multivalued field. +mv_first |Reduce a multivalued field to a single valued field containing the first value. +mv_last |Reduce a multivalued field to a single valued field containing the last value. +mv_max |Reduce a multivalued field to a single valued field containing the maximum value. +mv_median |Converts a multivalued field into a single valued field containing the median value. 
+mv_min |Reduce a multivalued field to a single valued field containing the minimum value. +mv_slice |Returns a subset of the multivalued field using the start and end index values. +mv_sort |Sorts a multivalued field in lexicographical order. +mv_sum |Converts a multivalued field into a single valued field containing the sum of all of the values. +mv_zip |Combines the values from two multivalued fields with a delimiter that joins them together. +now |Returns current date and time. +percentile |The value at which a certain percentage of observed values occur. +pi |The ratio of a circle’s circumference to its diameter. +pow |Returns the value of a base raised to the power of an exponent. +replace |The function substitutes in the string any match of the regular expression with the replacement string. +right |Return the substring that extracts length chars from the string starting from the right. +round |Rounds a number to the closest number with the specified number of digits. +rtrim |Removes trailing whitespaces from a string. +sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. +sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. +split |Split a single valued string into multiple strings. +sqrt |Returns the square root of a number. +st_centroid |The centroid of a spatial field. +st_contains |Returns whether the first geometry contains the second geometry. +st_intersects |Returns whether the two geometries or geometry columns intersect. +st_within |Returns whether the first geometry is within the second geometry. +st_x |Extracts the x-coordinate from a point geometry. +st_y |Extracts the y-coordinate from a point geometry. +starts_with |Returns a boolean that indicates whether a keyword string starts with another string +substring |Returns a substring of a string, specified by a start position and an optional length +sum |The sum of a numeric field. +tan |Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. +tanh |Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle. +tau |The ratio of a circle’s circumference to its radius. +to_bool |Converts an input value to a boolean value. +to_boolean |Converts an input value to a boolean value. +to_cartesianpo|Converts an input value to a point value. +to_cartesiansh|Converts an input value to a shape value. +to_datetime |Converts an input value to a date value. +to_dbl |Converts an input value to a double value. +to_degrees |Converts a number in radians to degrees. +to_double |Converts an input value to a double value. +to_dt |Converts an input value to a date value. +to_geopoint |Converts an input value to a geo_point value. +to_geoshape |Converts an input value to a geo_shape value. +to_int |Converts an input value to an integer value. +to_integer |Converts an input value to an integer value. +to_ip |Converts an input string to an IP value. +to_long |Converts an input value to a long value. +to_lower |Returns a new string representing the input string converted to lower case. +to_radians |Converts a number in degrees to radians. +to_str |Converts a field into a string. +to_string |Converts a field into a string. +to_ul |Converts an input value to an unsigned long value. +to_ulong |Converts an input value to an unsigned long value. +to_unsigned_lo|Converts an input value to an unsigned long value. +to_upper |Returns a new string representing the input string converted to upper case. 
+to_ver |Converts an input string to a version value. +to_version |Converts an input string to a version value. +trim |Removes leading and trailing whitespaces from a string. +values |Collect values for a field. +; + +metaFunctionsRemaining#[skip:-8.13.99] + META functions +| EVAL name = SUBSTRING(name, 0, 14) +| KEEP name, * +| DROP synopsis, description, argNames, argTypes, argDescriptions +; + + name:keyword | returnType:keyword | optionalArgs:boolean |variadic:boolean|isAggregation:boolean +abs |"double|integer|long|unsigned_long" |false |false |false +acos |double |false |false |false +asin |double |false |false |false +atan |double |false |false |false +atan2 |double |[false, false] |false |false +auto_bucket |"double|date" |[false, false, false, false]|false |false +avg |double |false |false |true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false +ceil |"double|integer|long|unsigned_long" |false |false |false +cidr_match |boolean |[false, false] |true |false +coalesce |"boolean|text|integer|keyword|long" |false |true |false +concat |keyword |[false, false] |true |false +cos |double |false |false |false +cosh |double |false |false |false +count |long |true |false |true +count_distinct|long |[false, true] |false |true +date_diff |integer |[false, false, false] |false |false +date_extract |long |[false, false] |false |false +date_format |keyword |[true, false] |false |false +date_parse |date |[true, false] |false |false +date_trunc |date |[false, false] |false |false +e |double |null |false |false +ends_with |boolean |[false, false] |false |false +floor |"double|integer|long|unsigned_long" |false |false |false +greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +left |keyword |[false, false] |false |false +length |integer |false |false |false +log |double |[true, false] |false |false +log10 |double |false |false |false +ltrim |"keyword|text" |false |false |false +max |"double|integer|long" |false |false |true +median |"double|integer|long" |false |false |true +median_absolut|"double|integer|long" |false |false |true +min |"double|integer|long" |false |false |true +mv_avg |double |false |false |false +mv_concat |keyword |[false, false] |false |false +mv_count |integer |false |false |false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |false |false |false +mv_median |"double|integer|long|unsigned_long" |false |false |false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" |false |false |false +mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |[false, false, true] |false |false +mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version" |[false, true] |false |false +mv_sum |"double|integer|long|unsigned_long" |false |false |false +mv_zip |keyword |[false, false, true] |false |false +now |date |null |false |false +percentile |"double|integer|long" 
|[false, false] |false |true +pi |double |null |false |false +pow |double |[false, false] |false |false +replace |keyword |[false, false, false] |false |false +right |keyword |[false, false] |false |false +round |double |[false, true] |false |false +rtrim |"keyword|text" |false |false |false +sin |double |false |false |false +sinh |double |false |false |false +split |keyword |[false, false] |false |false +sqrt |double |false |false |false +st_centroid |"geo_point|cartesian_point" |false |false |true +st_contains |boolean |[false, false] |false |false +st_intersects |boolean |[false, false] |false |false +st_within |boolean |[false, false] |false |false +st_x |double |false |false |false +st_y |double |false |false |false +starts_with |boolean |[false, false] |false |false +substring |keyword |[false, false, true] |false |false +sum |long |false |false |true +tan |double |false |false |false +tanh |double |false |false |false +tau |double |null |false |false +to_bool |boolean |false |false |false +to_boolean |boolean |false |false |false +to_cartesianpo|cartesian_point |false |false |false +to_cartesiansh|cartesian_shape |false |false |false +to_datetime |date |false |false |false +to_dbl |double |false |false |false +to_degrees |double |false |false |false +to_double |double |false |false |false +to_dt |date |false |false |false +to_geopoint |geo_point |false |false |false +to_geoshape |geo_shape |false |false |false +to_int |integer |false |false |false +to_integer |integer |false |false |false +to_ip |ip |false |false |false +to_long |long |false |false |false +to_lower |"keyword|text" |false |false |false +to_radians |double |false |false |false +to_str |keyword |false |false |false +to_string |keyword |false |false |false +to_ul |unsigned_long |false |false |false +to_ulong |unsigned_long |false |false |false +to_unsigned_lo|unsigned_long |false |false |false +to_upper |"keyword|text" |false |false |false +to_ver |version |false |false |false +to_version |version |false |false |false +trim |"keyword|text" |false |false |false +values |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |true +; metaFunctionsFiltered#[skip:-8.13.99] -META FUNCTIONS +META FUNCTIONS | WHERE STARTS_WITH(name, "sin") ; From f930544dccc7aaa8e5e8b978ae1967a2d06d867d Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 2 Apr 2024 19:35:48 -0700 Subject: [PATCH 071/264] Introduce ordinal bytesref block (#106852) This PR introduces ordinal-based blocks for BytesRef. These blocks consist of a pair: an IntBlock for ordinals and a BytesRefVector for the dictionary. Compared to the regular BytesRefBlock, these blocks are slower due to indirect access and consume more memory because of the additional ordinals block. However, they offer significant speed improvements and reduced memory usage when byte values are frequently repeated. There are several use cases where these blocks can be beneficial. 
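For readers new to dictionary encoding, here is a minimal, self-contained sketch of the idea this change builds on. It is plain Java with invented names (OrdinalEncodingSketch, column, dictionary, ordOf), not the block API from this patch: the string list plays the role of the BytesRefVector dictionary and the int array plays the role of the ordinals IntBlock.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class OrdinalEncodingSketch {
        public static void main(String[] args) {
            // A column of frequently repeated values, e.g. a keyword field.
            List<String> column = List.of("error", "info", "error", "warn", "error", "info");

            // Dictionary: each distinct value stored once (the BytesRefVector's role).
            List<String> dictionary = new ArrayList<>();
            Map<String, Integer> ordOf = new HashMap<>();
            // Ordinals: one small int per position, pointing into the dictionary (the IntBlock's role).
            int[] ordinals = new int[column.size()];

            for (int i = 0; i < column.size(); i++) {
                ordinals[i] = ordOf.computeIfAbsent(column.get(i), v -> {
                    dictionary.add(v);
                    return dictionary.size() - 1;
                });
            }

            // Reads pay one extra indirection: ordinal -> dictionary entry.
            for (int i = 0; i < ordinals.length; i++) {
                if (!dictionary.get(ordinals[i]).equals(column.get(i))) {
                    throw new AssertionError("round trip failed at position " + i);
                }
            }
            // 6 positions but only 3 distinct strings: each string is stored once plus
            // one int per position, which is where the memory saving comes from.
            System.out.println("positions=" + ordinals.length + ", distinct=" + dictionary.size());
        }
    }

The patch only keeps this representation when it pays off: isDense() requires roughly 1.5 or more values per dictionary entry (totalValueCount * 2 / 3 >= dictionary size), and SingletonOrdinalsBuilder.build() only chooses buildOrdinal() when there are at least 32 values and at least twice as many values as distinct doc-value ordinals, falling back to buildRegularBlock() otherwise.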
--- docs/changelog/106852.yaml | 6 + .../org/elasticsearch/TransportVersions.java | 1 + .../compute/data/BooleanBlock.java | 2 +- .../compute/data/BytesRefBlock.java | 10 +- .../compute/data/BytesRefVector.java | 7 +- .../compute/data/DoubleBlock.java | 2 +- .../elasticsearch/compute/data/IntBlock.java | 2 +- .../elasticsearch/compute/data/LongBlock.java | 2 +- .../org/elasticsearch/compute/data/Block.java | 1 + .../compute/data/OrdinalBytesRefBlock.java | 203 ++++++++++++++++++ .../compute/data/OrdinalBytesRefVector.java | 129 +++++++++++ .../data/SingletonOrdinalsBuilder.java | 61 +++++- .../elasticsearch/compute/data/Vector.java | 1 + .../compute/data/X-Block.java.st | 19 +- .../compute/data/X-Vector.java.st | 13 +- .../compute/data/BlockSerializationTests.java | 102 +++++++++ .../data/SingletonOrdinalsBuilderTests.java | 12 +- 17 files changed, 553 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/106852.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java diff --git a/docs/changelog/106852.yaml b/docs/changelog/106852.yaml new file mode 100644 index 0000000000000..2161b1ea22f30 --- /dev/null +++ b/docs/changelog/106852.yaml @@ -0,0 +1,6 @@ +pr: 106852 +summary: Introduce ordinal bytesref block +area: ES|QL +type: enhancement +issues: + - 106387 diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index e072bfc799e5d..5614d9c1dba12 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -161,6 +161,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); + public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 617cb731da656..43181a344e268 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -52,7 +52,7 @@ private static BooleanBlock readFrom(StreamInput in) throws IOException { return readFrom((BlockStreamInput) in); } - private static BooleanBlock readFrom(BlockStreamInput in) throws IOException { + static BooleanBlock readFrom(BlockStreamInput in) throws IOException { final byte serializationType = in.readByte(); return switch (serializationType) { case SERIALIZE_BLOCK_VALUES -> BooleanBlock.readValues(in); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 0b70fa0f378eb..5f5e1f9caa488 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -20,8 +20,8 @@ * Block that stores BytesRef values. * This class is generated. Do not edit it. */ -public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock { - +public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, + OrdinalBytesRefBlock { BytesRef NULL_VALUE = new BytesRef(); /** @@ -56,12 +56,13 @@ private static BytesRefBlock readFrom(StreamInput in) throws IOException { return readFrom((BlockStreamInput) in); } - private static BytesRefBlock readFrom(BlockStreamInput in) throws IOException { + static BytesRefBlock readFrom(BlockStreamInput in) throws IOException { final byte serializationType = in.readByte(); return switch (serializationType) { case SERIALIZE_BLOCK_VALUES -> BytesRefBlock.readValues(in); case SERIALIZE_BLOCK_VECTOR -> BytesRefVector.readFrom(in.blockFactory(), in).asBlock(); case SERIALIZE_BLOCK_ARRAY -> BytesRefArrayBlock.readArrayBlock(in.blockFactory(), in); + case SERIALIZE_BLOCK_ORDINAL -> OrdinalBytesRefBlock.readOrdinalBlock(in.blockFactory(), in); default -> { assert false : "invalid block serialization type " + serializationType; throw new IllegalStateException("invalid serialization type " + serializationType); @@ -98,6 +99,9 @@ default void writeTo(StreamOutput out) throws IOException { } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_ARRAY_BLOCK) && this instanceof BytesRefArrayBlock b) { out.writeByte(SERIALIZE_BLOCK_ARRAY); b.writeArrayBlock(out); + } else if (version.onOrAfter(TransportVersions.ESQL_ORDINAL_BLOCK) && this instanceof OrdinalBytesRefBlock b && b.isDense()) { + out.writeByte(SERIALIZE_BLOCK_ORDINAL); + b.writeOrdinalBlock(out); } else { out.writeByte(SERIALIZE_BLOCK_VALUES); BytesRefBlock.writeValues(this, out); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index c0b107065f43c..11daa4a4f768d 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -18,7 +18,8 @@ * Vector that stores BytesRef values. * This class is generated. Do not edit it. */ -public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector { +public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, + OrdinalBytesRefVector { BytesRef getBytesRef(int position, BytesRef dest); @Override @@ -80,6 +81,7 @@ static BytesRefVector readFrom(BlockFactory blockFactory, StreamInput in) throws case SERIALIZE_VECTOR_VALUES -> readValues(positions, in, blockFactory); case SERIALIZE_VECTOR_CONSTANT -> blockFactory.newConstantBytesRefVector(in.readBytesRef(), positions); case SERIALIZE_VECTOR_ARRAY -> BytesRefArrayVector.readArrayVector(positions, in, blockFactory); + case SERIALIZE_VECTOR_ORDINAL -> OrdinalBytesRefVector.readOrdinalVector(blockFactory, in); default -> { assert false : "invalid vector serialization type [" + serializationType + "]"; throw new IllegalStateException("invalid vector serialization type [" + serializationType + "]"); @@ -98,6 +100,9 @@ default void writeTo(StreamOutput out) throws IOException { } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_ARRAY_VECTOR) && this instanceof BytesRefArrayVector v) { out.writeByte(SERIALIZE_VECTOR_ARRAY); v.writeArrayVector(positions, out); + } else if (version.onOrAfter(TransportVersions.ESQL_ORDINAL_BLOCK) && this instanceof OrdinalBytesRefVector v && v.isDense()) { + out.writeByte(SERIALIZE_VECTOR_ORDINAL); + v.writeOrdinalVector(out); } else { out.writeByte(SERIALIZE_VECTOR_VALUES); writeValues(this, positions, out); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 5fe36081478f3..27d70caaa18fe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -52,7 +52,7 @@ private static DoubleBlock readFrom(StreamInput in) throws IOException { return readFrom((BlockStreamInput) in); } - private static DoubleBlock readFrom(BlockStreamInput in) throws IOException { + static DoubleBlock readFrom(BlockStreamInput in) throws IOException { final byte serializationType = in.readByte(); return switch (serializationType) { case SERIALIZE_BLOCK_VALUES -> DoubleBlock.readValues(in); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 057fcd066ad76..a34d50bf6ff55 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -52,7 +52,7 @@ private static IntBlock readFrom(StreamInput in) throws IOException { return readFrom((BlockStreamInput) in); } - private static IntBlock readFrom(BlockStreamInput in) throws IOException { + static IntBlock readFrom(BlockStreamInput in) throws IOException { final byte serializationType = in.readByte(); return switch 
(serializationType) {
            case SERIALIZE_BLOCK_VALUES -> IntBlock.readValues(in);
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
index fb75ac3303201..21c3eb4257b8d 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java
@@ -52,7 +52,7 @@ private static LongBlock readFrom(StreamInput in) throws IOException {
         return readFrom((BlockStreamInput) in);
     }

-    private static LongBlock readFrom(BlockStreamInput in) throws IOException {
+    static LongBlock readFrom(BlockStreamInput in) throws IOException {
         final byte serializationType = in.readByte();
         return switch (serializationType) {
             case SERIALIZE_BLOCK_VALUES -> LongBlock.readValues(in);
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
index 7c04ef57f9e2e..7fc92da1943ac 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java
@@ -247,4 +247,5 @@ static List getNamedWriteables() {
     byte SERIALIZE_BLOCK_VECTOR = 1;
     byte SERIALIZE_BLOCK_ARRAY = 2;
     byte SERIALIZE_BLOCK_BIG_ARRAY = 3;
+    byte SERIALIZE_BLOCK_ORDINAL = 3; // shares the value 3 with BIG_ARRAY: only BytesRef blocks use ORDINAL and they have no big-array form, so the byte stays unambiguous per element type
 }
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java
new file mode 100644
index 0000000000000..4e409a7d214ef
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.data;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Releasables;
+
+import java.io.IOException;
+
+/**
+ * A {@link BytesRefBlock} consists of a pair: an {@link IntBlock} for ordinals and a {@link BytesRefVector} for the dictionary.
+ * Compared to the regular {@link BytesRefBlock}, this block is slower due to indirect access and consumes more memory because of
+ * the additional ordinals block. However, it offers significant speed improvements and reduced memory usage when byte values are
+ * frequently repeated.
+ */
+public final class OrdinalBytesRefBlock extends AbstractNonThreadSafeRefCounted implements BytesRefBlock {
+    private final IntBlock ordinals;
+    private final BytesRefVector bytes;
+
+    public OrdinalBytesRefBlock(IntBlock ordinals, BytesRefVector bytes) {
+        this.ordinals = ordinals;
+        this.bytes = bytes;
+    }
+
+    static OrdinalBytesRefBlock readOrdinalBlock(BlockFactory blockFactory, BlockStreamInput in) throws IOException {
+        BytesRefVector bytes = null;
+        OrdinalBytesRefBlock result = null;
+        IntBlock ordinals = IntBlock.readFrom(in);
+        try {
+            bytes = BytesRefVector.readFrom(blockFactory, in);
+            result = new OrdinalBytesRefBlock(ordinals, bytes);
+        } finally {
+            if (result == null) {
+                Releasables.close(ordinals, bytes);
+            }
+        }
+        return result;
+    }
+
+    void writeOrdinalBlock(StreamOutput out) throws IOException {
+        ordinals.writeTo(out);
+        bytes.writeTo(out);
+    }
+
+    /**
+     * Returns true if this ordinal block is dense enough to enable optimizations using its ordinals.
+     */
+    public boolean isDense() {
+        return ordinals.getTotalValueCount() * 2 / 3 >= bytes.getPositionCount();
+    }
+
+    @Override
+    public BytesRef getBytesRef(int valueIndex, BytesRef dest) {
+        return bytes.getBytesRef(ordinals.getInt(valueIndex), dest);
+    }
+
+    @Override
+    public BytesRefVector asVector() {
+        IntVector vector = ordinals.asVector();
+        if (vector != null) {
+            return new OrdinalBytesRefVector(vector, bytes);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public BytesRefBlock filter(int... positions) {
+        if (positions.length * ordinals.getTotalValueCount() >= bytes.getPositionCount() * ordinals.getPositionCount()) {
+            OrdinalBytesRefBlock result = null;
+            IntBlock filteredOrdinals = ordinals.filter(positions);
+            try {
+                result = new OrdinalBytesRefBlock(filteredOrdinals, bytes);
+                bytes.incRef();
+            } finally {
+                if (result == null) {
+                    filteredOrdinals.close();
+                }
+            }
+            return result;
+        } else {
+            // TODO: merge this with BytesRefArrayBlock#filter
+            BytesRef scratch = new BytesRef();
+            try (BytesRefBlock.Builder builder = blockFactory().newBytesRefBlockBuilder(positions.length)) {
+                for (int pos : positions) {
+                    if (isNull(pos)) {
+                        builder.appendNull();
+                        continue;
+                    }
+                    int valueCount = getValueCount(pos);
+                    int first = getFirstValueIndex(pos);
+                    if (valueCount == 1) {
+                        builder.appendBytesRef(getBytesRef(getFirstValueIndex(pos), scratch));
+                    } else {
+                        builder.beginPositionEntry();
+                        for (int c = 0; c < valueCount; c++) {
+                            builder.appendBytesRef(getBytesRef(first + c, scratch));
+                        }
+                        builder.endPositionEntry();
+                    }
+                }
+                return builder.mvOrdering(mvOrdering()).build();
+            }
+        }
+    }
+
+    @Override
+    protected void closeInternal() {
+        Releasables.close(ordinals, bytes);
+    }
+
+    @Override
+    public int getTotalValueCount() {
+        return ordinals.getTotalValueCount();
+    }
+
+    @Override
+    public int getPositionCount() {
+        return ordinals.getPositionCount();
+    }
+
+    @Override
+    public int getFirstValueIndex(int position) {
+        return ordinals.getFirstValueIndex(position);
+    }
+
+    @Override
+    public int getValueCount(int position) {
+        return ordinals.getValueCount(position);
+    }
+
+    @Override
+    public ElementType elementType() {
+        return bytes.elementType();
+    }
+
+    @Override
+    public BlockFactory blockFactory() {
+        return ordinals.blockFactory();
+    }
+
+    @Override
+    public void allowPassingToDifferentDriver() {
+        ordinals.allowPassingToDifferentDriver();
+        bytes.allowPassingToDifferentDriver();
+    }
+
+    @Override
+    public boolean isNull(int position) {
+        return ordinals.isNull(position);
+    }
+
+    @Override
+    public int nullValuesCount() {
+        return ordinals.nullValuesCount();
+    }
+
+    @Override
+    public boolean mayHaveNulls() {
+        return ordinals.mayHaveNulls();
+    }
+
+    @Override
+    public boolean areAllValuesNull() {
+        return ordinals.areAllValuesNull();
+    }
+
+    @Override
+    public boolean mayHaveMultivaluedFields() {
+        return ordinals.mayHaveMultivaluedFields();
+    }
+
+    @Override
+    public MvOrdering mvOrdering() {
+        return ordinals.mvOrdering();
+    }
+
+    @Override
+    public OrdinalBytesRefBlock expand() {
+        OrdinalBytesRefBlock result = null;
+        IntBlock expandedOrdinals = ordinals.expand();
+        try {
+            result = new OrdinalBytesRefBlock(expandedOrdinals, bytes);
+            bytes.incRef();
+        } finally {
+            if (result == null) {
+                expandedOrdinals.close();
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public long ramBytesUsed() {
+        return ordinals.ramBytesUsed() + bytes.ramBytesUsed();
+    }
+}
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java
new file mode 100644
index 0000000000000..f353961454b02
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.data;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Releasables;
+
+import java.io.IOException;
+
+/**
+ * A {@link BytesRefVector} consists of a pair: an {@link IntVector} for ordinals and a {@link BytesRefVector} for the dictionary.
+ * Compared to the regular {@link BytesRefVector}, this vector is slower due to indirect access and consumes more memory because of
+ * the additional ordinals vector. However, it offers significant speed improvements and reduced memory usage when byte values are
+ * frequently repeated.
+ */
+public final class OrdinalBytesRefVector extends AbstractNonThreadSafeRefCounted implements BytesRefVector {
+    private final IntVector ordinals;
+    private final BytesRefVector bytes;
+
+    public OrdinalBytesRefVector(IntVector ordinals, BytesRefVector bytes) {
+        this.ordinals = ordinals;
+        this.bytes = bytes;
+    }
+
+    static OrdinalBytesRefVector readOrdinalVector(BlockFactory blockFactory, StreamInput in) throws IOException {
+        IntVector ordinals = IntVector.readFrom(blockFactory, in);
+        BytesRefVector bytes = null;
+        OrdinalBytesRefVector result = null;
+        try {
+            bytes = BytesRefVector.readFrom(blockFactory, in);
+            result = new OrdinalBytesRefVector(ordinals, bytes);
+        } finally {
+            if (result == null) {
+                Releasables.close(ordinals, bytes);
+            }
+        }
+        return result;
+    }
+
+    void writeOrdinalVector(StreamOutput out) throws IOException {
+        ordinals.writeTo(out);
+        bytes.writeTo(out);
+    }
+
+    /**
+     * Returns true if this ordinal vector is dense enough to enable optimizations using its ordinals.
+     */
+    public boolean isDense() {
+        return ordinals.getPositionCount() * 2 / 3 >= bytes.getPositionCount();
+    }
+
+    @Override
+    public int getPositionCount() {
+        return ordinals.getPositionCount();
+    }
+
+    @Override
+    public BlockFactory blockFactory() {
+        return ordinals.blockFactory();
+    }
+
+    @Override
+    public void allowPassingToDifferentDriver() {
+        ordinals.allowPassingToDifferentDriver();
+        bytes.allowPassingToDifferentDriver();
+    }
+
+    @Override
+    public BytesRef getBytesRef(int position, BytesRef dest) {
+        return bytes.getBytesRef(ordinals.getInt(position), dest);
+    }
+
+    @Override
+    public BytesRefBlock asBlock() {
+        return new BytesRefVectorBlock(this);
+    }
+
+    @Override
+    public BytesRefVector filter(int...
positions) { + if (positions.length >= ordinals.getPositionCount()) { + OrdinalBytesRefVector result = null; + IntVector filteredOrdinals = ordinals.filter(positions); + try { + result = new OrdinalBytesRefVector(filteredOrdinals, bytes); + bytes.incRef(); + } finally { + if (result == null) { + filteredOrdinals.close(); + } + } + return result; + } else { + final BytesRef scratch = new BytesRef(); + try (BytesRefVector.Builder builder = blockFactory().newBytesRefVectorBuilder(positions.length)) { + for (int p : positions) { + builder.appendBytesRef(getBytesRef(p, scratch)); + } + return builder.build(); + } + } + } + + @Override + public ElementType elementType() { + return bytes.elementType(); + } + + @Override + public boolean isConstant() { + return bytes.isConstant() || ordinals.isConstant(); + } + + @Override + public long ramBytesUsed() { + return ordinals.ramBytesUsed() + bytes.ramBytesUsed(); + } + + @Override + protected void closeInternal() { + Releasables.close(ordinals, bytes); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java index 10e9237ef7071..8616d7a7e1bc6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -21,7 +22,7 @@ public class SingletonOrdinalsBuilder implements BlockLoader.SingletonOrdinalsBuilder, Releasable, Block.Builder { private final BlockFactory blockFactory; private final SortedDocValues docValues; - private int[] ords; + private final int[] ords; private int count; public SingletonOrdinalsBuilder(BlockFactory blockFactory, SortedDocValues docValues, int count) { @@ -53,8 +54,53 @@ public SingletonOrdinalsBuilder endPositionEntry() { throw new UnsupportedOperationException("should only have one value per doc"); } - @Override - public BytesRefBlock build() { + BytesRefBlock buildOrdinal() { + int valueCount = docValues.getValueCount(); + long breakerSize = ordsSize(valueCount); + blockFactory.adjustBreaker(breakerSize); + BytesRefVector bytesVector = null; + IntBlock ordinalBlock = null; + try { + int[] newOrds = new int[valueCount]; + Arrays.fill(newOrds, -1); + for (int ord : ords) { + if (ord != -1) { + newOrds[ord] = 0; + } + } + // resolve the ordinals and remaps the ordinals + int nextOrd = -1; + try (BytesRefVector.Builder bytesBuilder = blockFactory.newBytesRefVectorBuilder(Math.min(valueCount, ords.length))) { + for (int i = 0; i < newOrds.length; i++) { + if (newOrds[i] != -1) { + newOrds[i] = ++nextOrd; + bytesBuilder.appendBytesRef(docValues.lookupOrd(i)); + } + } + bytesVector = bytesBuilder.build(); + } catch (IOException e) { + throw new UncheckedIOException("error resolving ordinals", e); + } + try (IntBlock.Builder ordinalsBuilder = blockFactory.newIntBlockBuilder(ords.length)) { + for (int ord : ords) { + if (ord == -1) { + ordinalsBuilder.appendNull(); + } else { + ordinalsBuilder.appendInt(newOrds[ord]); + } + } + ordinalBlock = ordinalsBuilder.build(); + } + final OrdinalBytesRefBlock result = new 
OrdinalBytesRefBlock(ordinalBlock, bytesVector); + bytesVector = null; + ordinalBlock = null; + return result; + } finally { + Releasables.close(() -> blockFactory.adjustBreaker(-breakerSize), ordinalBlock, bytesVector); + } + } + + BytesRefBlock buildRegularBlock() { try { long breakerSize = ordsSize(ords.length); // Increment breaker for sorted ords. @@ -105,6 +151,15 @@ public BytesRefBlock build() { } } + @Override + public BytesRefBlock build() { + if (ords.length >= 2 * docValues.getValueCount() && ords.length >= 32) { + return buildOrdinal(); + } else { + return buildRegularBlock(); + } + } + @Override public void close() { blockFactory.adjustBreaker(-ordsSize(ords.length)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 9461a3e066df3..84722fad93b7f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -80,4 +80,5 @@ interface Builder extends Releasable { byte SERIALIZE_VECTOR_CONSTANT = 1; byte SERIALIZE_VECTOR_ARRAY = 2; byte SERIALIZE_VECTOR_BIG_ARRAY = 3; + byte SERIALIZE_VECTOR_ORDINAL = 4; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index f86f86500529c..3850e3da7c796 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -22,7 +22,12 @@ import java.io.IOException; * Block that stores $type$ values. * This class is generated. Do not edit it. 
*/ -public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock, ConstantNullBlock$if(BytesRef)$$else$, $Type$BigArrayBlock$endif$ { +$if(BytesRef)$ +public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock, + OrdinalBytesRefBlock { +$else$ +public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock, ConstantNullBlock, $Type$BigArrayBlock { +$endif$ $if(BytesRef)$ BytesRef NULL_VALUE = new BytesRef(); @@ -67,13 +72,15 @@ $endif$ return readFrom((BlockStreamInput) in); } - private static $Type$Block readFrom(BlockStreamInput in) throws IOException { + static $Type$Block readFrom(BlockStreamInput in) throws IOException { final byte serializationType = in.readByte(); return switch (serializationType) { case SERIALIZE_BLOCK_VALUES -> $Type$Block.readValues(in); case SERIALIZE_BLOCK_VECTOR -> $Type$Vector.readFrom(in.blockFactory(), in).asBlock(); case SERIALIZE_BLOCK_ARRAY -> $Type$ArrayBlock.readArrayBlock(in.blockFactory(), in); -$if(BytesRef)$$else$ +$if(BytesRef)$ + case SERIALIZE_BLOCK_ORDINAL -> OrdinalBytesRefBlock.readOrdinalBlock(in.blockFactory(), in); +$else$ case SERIALIZE_BLOCK_BIG_ARRAY -> $Type$BigArrayBlock.readArrayBlock(in.blockFactory(), in); $endif$ default -> { @@ -112,7 +119,11 @@ $endif$ } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_ARRAY_BLOCK) && this instanceof $Type$ArrayBlock b) { out.writeByte(SERIALIZE_BLOCK_ARRAY); b.writeArrayBlock(out); -$if(BytesRef)$$else$ +$if(BytesRef)$ + } else if (version.onOrAfter(TransportVersions.ESQL_ORDINAL_BLOCK) && this instanceof OrdinalBytesRefBlock b && b.isDense()) { + out.writeByte(SERIALIZE_BLOCK_ORDINAL); + b.writeOrdinalBlock(out); +$else$ } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_BIG_ARRAY) && this instanceof $Type$BigArrayBlock b) { out.writeByte(SERIALIZE_BLOCK_BIG_ARRAY); b.writeArrayBlock(out); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 01090b6cab18a..6979883534323 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -21,7 +21,8 @@ import java.io.IOException; * This class is generated. Do not edit it. 
*/ $if(BytesRef)$ -public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, ConstantNullVector { +public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector, + OrdinalBytesRefVector { $elseif(boolean)$ public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector, ConstantNullVector { @@ -114,7 +115,9 @@ $endif$ case SERIALIZE_VECTOR_VALUES -> readValues(positions, in, blockFactory); case SERIALIZE_VECTOR_CONSTANT -> blockFactory.newConstant$Type$Vector(in.read$Type$(), positions); case SERIALIZE_VECTOR_ARRAY -> $Type$ArrayVector.readArrayVector(positions, in, blockFactory); -$if(BytesRef)$$else$ +$if(BytesRef)$ + case SERIALIZE_VECTOR_ORDINAL -> OrdinalBytesRefVector.readOrdinalVector(blockFactory, in); +$else$ case SERIALIZE_VECTOR_BIG_ARRAY -> $Type$BigArrayVector.readArrayVector(positions, in, blockFactory); $endif$ default -> { @@ -139,7 +142,11 @@ $endif$ } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_ARRAY_VECTOR) && this instanceof $Type$ArrayVector v) { out.writeByte(SERIALIZE_VECTOR_ARRAY); v.writeArrayVector(positions, out); -$if(BytesRef)$$else$ +$if(BytesRef)$ + } else if (version.onOrAfter(TransportVersions.ESQL_ORDINAL_BLOCK) && this instanceof OrdinalBytesRefVector v && v.isDense()) { + out.writeByte(SERIALIZE_VECTOR_ORDINAL); + v.writeOrdinalVector(out); +$else$ } else if (version.onOrAfter(TransportVersions.ESQL_SERIALIZE_BIG_VECTOR) && this instanceof $Type$BigArrayVector v) { out.writeByte(SERIALIZE_VECTOR_BIG_ARRAY); v.writeArrayVector(positions, out); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index b13aa040f307d..0dfb72274b9d9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunction; @@ -234,6 +235,107 @@ public void testSimulateAggs() { } } + public void testOrdinalVector() throws Exception { + int numValues = randomIntBetween(1, 1000); + BlockFactory blockFactory = driverContext().blockFactory(); + BytesRef scratch = new BytesRef(); + try ( + BytesRefVector.Builder regular = blockFactory.newBytesRefVectorBuilder(between(1, numValues * 3)); + BytesRefHash hash = new BytesRefHash(1, blockFactory.bigArrays()); + IntVector.Builder ordinals = blockFactory.newIntVectorBuilder(between(1, numValues * 3)); + BytesRefVector.Builder dictionary = blockFactory.newBytesRefVectorBuilder(between(1, numValues * 3)); + ) { + BytesRef v = new BytesRef("value-" + randomIntBetween(1, 20)); + int ord = Math.toIntExact(hash.add(v)); + ord = ord < 0 ? 
-1 - ord : ord; + ordinals.appendInt(ord); + regular.appendBytesRef(v); + for (long l = 0; l < hash.size(); l++) { + dictionary.appendBytesRef(hash.get(l, scratch)); + } + try (BytesRefVector v1 = regular.build(); BytesRefVector v2 = new OrdinalBytesRefVector(ordinals.build(), dictionary.build())) { + BytesRefVector.equals(v1, v2); + for (BytesRefVector vector : List.of(v1, v2)) { + try (BytesRefBlock deserBlock = serializeDeserializeBlock(vector.asBlock())) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + for (int p = 0; p < v1.getPositionCount(); p++) { + try (BytesRefVector f1 = v1.filter(p); BytesRefVector f2 = v2.filter(p)) { + BytesRefVector.equals(f1, f2); + for (BytesRefVector vector : List.of(f1, f2)) { + try (BytesRefBlock deserBlock = serializeDeserializeBlock(vector.asBlock())) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + } + } + } + } + } + + public void testOrdinalBlock() throws Exception { + int numValues = randomIntBetween(1, 1000); + BlockFactory blockFactory = driverContext().blockFactory(); + BytesRef scratch = new BytesRef(); + try ( + BytesRefBlock.Builder regular = blockFactory.newBytesRefBlockBuilder(between(1, numValues * 3)); + BytesRefHash hash = new BytesRefHash(1, blockFactory.bigArrays()); + IntBlock.Builder ordinals = blockFactory.newIntBlockBuilder(between(1, numValues * 3)); + BytesRefVector.Builder dictionary = blockFactory.newBytesRefVectorBuilder(between(1, numValues * 3)); + ) { + int valueCount = randomIntBetween(0, 3); + if (valueCount == 0) { + regular.appendNull(); + ordinals.appendNull(); + } + if (valueCount > 1) { + regular.beginPositionEntry(); + ordinals.beginPositionEntry(); + } + for (int v = 0; v < valueCount; v++) { + BytesRef bytes = new BytesRef("value-" + randomIntBetween(1, 20)); + int ord = Math.toIntExact(hash.add(bytes)); + ord = ord < 0 ? 
-1 - ord : ord; + ordinals.appendInt(ord); + regular.appendBytesRef(bytes); + } + if (valueCount > 1) { + regular.endPositionEntry(); + ordinals.endPositionEntry(); + } + for (long l = 0; l < hash.size(); l++) { + dictionary.appendBytesRef(hash.get(l, scratch)); + } + try (BytesRefBlock b1 = regular.build(); BytesRefBlock b2 = new OrdinalBytesRefBlock(ordinals.build(), dictionary.build())) { + BytesRefBlock.equals(b1, b2); + for (BytesRefBlock block : List.of(b1, b2)) { + try (BytesRefBlock deserBlock = serializeDeserializeBlock(block)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + for (int p = 0; p < b1.getPositionCount(); p++) { + try (BytesRefBlock f1 = b1.filter(p); BytesRefBlock f2 = b2.filter(p)) { + BytesRefBlock.equals(f1, f2); + for (BytesRefBlock block : List.of(f1, f2)) { + try (BytesRefBlock deserBlock = serializeDeserializeBlock(block)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + } + } + try (BytesRefBlock e1 = b1.expand(); BytesRefBlock e2 = b2.expand()) { + BytesRefBlock.equals(e1, e2); + for (BytesRefBlock block : List.of(e1, e2)) { + try (BytesRefBlock deserBlock = serializeDeserializeBlock(block)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + } + } + } + } + static BytesRef randomBytesRef() { return new BytesRef(randomAlphaOfLengthBetween(0, 10)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java index 016d74aa6c299..ffe13375bc941 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java @@ -74,7 +74,7 @@ private void testRead(BlockFactory factory) throws IOException { builder.appendOrd(docValues.ordValue()); } } - try (BytesRefBlock build = builder.build()) { + try (BytesRefBlock build = buildOrdinalsBuilder(builder)) { for (int i = 0; i < build.getPositionCount(); i++) { counts.merge(build.getBytesRef(i, new BytesRef()).utf8ToString(), 1, (lhs, rhs) -> lhs + rhs); } @@ -141,7 +141,7 @@ public void testAllNull() throws IOException { builder.appendNull(); } } - try (BytesRefBlock built = builder.build()) { + try (BytesRefBlock built = buildOrdinalsBuilder(builder)) { for (int p = 0; p < built.getPositionCount(); p++) { assertThat(built.isNull(p), equalTo(true)); } @@ -153,6 +153,14 @@ public void testAllNull() throws IOException { } } + static BytesRefBlock buildOrdinalsBuilder(SingletonOrdinalsBuilder builder) { + if (randomBoolean()) { + return builder.buildRegularBlock(); + } else { + return builder.buildOrdinal(); + } + } + @After public void allBreakersEmpty() throws Exception { // first check that all big arrays are released, which can affect breakers From 431f73f94e86062f6ff0d476986ac84080b41197 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:02:23 +0300 Subject: [PATCH 072/264] [TEST] Wait for downsampling op to complete on master side (#106918) * unmute test, remove failing assert * Wait for downsampling op to complete on master side * read asserted value properly * add invalid config status --- .../xpack/downsample/DownsampleMetrics.java | 3 +- .../downsample/TransportDownsampleAction.java | 23 
++++++-- .../DownsampleActionSingleNodeTests.java | 58 ++++++++++++------- 3 files changed, 56 insertions(+), 28 deletions(-) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java index 797b89ecf11a0..628191d22f50a 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -55,7 +55,8 @@ enum ActionStatus { SUCCESS("success"), MISSING_DOCS("missing_docs"), - FAILED("failed"); + FAILED("failed"), + INVALID_CONFIGURATION("invalid_configuration"); static final String NAME = "status"; diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index c526561999497..cf794fe584a63 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -180,16 +180,21 @@ public TransportDownsampleAction( } private void recordLatencyOnSuccess(long startTime) { - downsampleMetrics.recordLatencyTotal( - TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), - DownsampleMetrics.ActionStatus.SUCCESS - ); + recordLatency(startTime, DownsampleMetrics.ActionStatus.SUCCESS); } private void recordLatencyOnFailure(long startTime) { + recordLatency(startTime, DownsampleMetrics.ActionStatus.FAILED); + } + + private void recordLatencyOnInvalidConfiguration(long startTime) { + recordLatency(startTime, DownsampleMetrics.ActionStatus.INVALID_CONFIGURATION); + } + + private void recordLatency(long startTime, DownsampleMetrics.ActionStatus status) { downsampleMetrics.recordLatencyTotal( TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), - DownsampleMetrics.ActionStatus.FAILED + status ); } @@ -210,6 +215,7 @@ protected void masterOperation( boolean hasDocumentLevelPermissions = indexPermissions.getDocumentPermissions().hasDocumentLevelPermissions(); boolean hasFieldLevelSecurity = indexPermissions.getFieldPermissions().hasFieldLevelSecurity(); if (hasDocumentLevelPermissions || hasFieldLevelSecurity) { + recordLatencyOnInvalidConfiguration(startTime); listener.onFailure( new ElasticsearchException( "Rollup forbidden for index [" + sourceIndexName + "] with document level or field level security settings." 
@@ -222,12 +228,14 @@ protected void masterOperation( // Assert source index exists IndexMetadata sourceIndexMetadata = state.getMetadata().index(sourceIndexName); if (sourceIndexMetadata == null) { + recordLatencyOnInvalidConfiguration(startTime); listener.onFailure(new IndexNotFoundException(sourceIndexName)); return; } // Assert source index is a time_series index if (IndexSettings.MODE.get(sourceIndexMetadata.getSettings()) != IndexMode.TIME_SERIES) { + recordLatencyOnInvalidConfiguration(startTime); listener.onFailure( new ElasticsearchException( "Rollup requires setting [" @@ -244,6 +252,7 @@ protected void masterOperation( // Assert source index is read-only if (state.blocks().indexBlocked(ClusterBlockLevel.WRITE, sourceIndexName) == false) { + recordLatencyOnInvalidConfiguration(startTime); listener.onFailure( new ElasticsearchException( "Downsample requires setting [" + IndexMetadata.SETTING_BLOCKS_WRITE + " = true] for index [" + sourceIndexName + "]" @@ -318,6 +327,7 @@ protected void masterOperation( } if (validationException.validationErrors().isEmpty() == false) { + recordLatencyOnInvalidConfiguration(startTime); delegate.onFailure(validationException); return; } @@ -326,6 +336,7 @@ protected void masterOperation( try { mapping = createDownsampleIndexMapping(helper, request.getDownsampleConfig(), mapperService, sourceIndexMappings); } catch (IOException e) { + recordLatencyOnFailure(startTime); delegate.onFailure(e); return; } @@ -350,6 +361,7 @@ protected void masterOperation( dimensionFields ); } else { + recordLatencyOnFailure(startTime); delegate.onFailure(new ElasticsearchException("Failed to create downsample index [" + downsampleIndexName + "]")); } }, e -> { @@ -378,6 +390,7 @@ protected void masterOperation( dimensionFields ); } else { + recordLatencyOnFailure(startTime); delegate.onFailure(e); } }) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 2bcbca537ecd3..3c4be50b25a73 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -247,7 +247,7 @@ public void setup() throws IOException { assertAcked(indicesAdmin().prepareCreate(sourceIndex).setSettings(settings.build()).setMapping(mapping).get()); } - public void testDownsampleIndex() throws IOException { + public void testDownsampleIndex() throws Exception { DownsampleConfig config = new DownsampleConfig(randomInterval()); SourceSupplier sourceSupplier = () -> { String ts = randomDateForInterval(config.getInterval()); @@ -306,7 +306,7 @@ public void testDownsampleIndex() throws IOException { assertDownsampleIndex(sourceIndex, downsampleIndex, config); } - public void testDownsampleOfDownsample() throws IOException { + public void testDownsampleOfDownsample() throws Exception { int intervalMinutes = randomIntBetween(10, 120); DownsampleConfig config = new DownsampleConfig(DateHistogramInterval.minutes(intervalMinutes)); SourceSupplier sourceSupplier = () -> { @@ -429,7 +429,7 @@ public void testNullDownsampleConfig() { assertThat(exception.getMessage(), containsString("downsample configuration is missing")); } - public void testDownsampleSparseMetrics() throws IOException { + public void testDownsampleSparseMetrics() throws Exception { 
DownsampleConfig config = new DownsampleConfig(randomInterval()); SourceSupplier sourceSupplier = () -> { XContentBuilder builder = XContentFactory.jsonBuilder() @@ -463,7 +463,7 @@ public void testCannotDownsampleToExistingIndex() throws Exception { assertThat(exception.getMessage(), containsString(downsampleIndex)); } - public void testDownsampleEmptyIndex() throws IOException { + public void testDownsampleEmptyIndex() throws Exception { DownsampleConfig config = new DownsampleConfig(randomInterval()); // Source index has been created in the setup() method prepareSourceIndex(sourceIndex, true); @@ -471,7 +471,7 @@ public void testDownsampleEmptyIndex() throws IOException { assertDownsampleIndex(sourceIndex, downsampleIndex, config); } - public void testDownsampleIndexWithNoMetrics() throws IOException { + public void testDownsampleIndexWithNoMetrics() throws Exception { // Create a source index that contains no metric fields in its mapping String sourceIndex = "no-metrics-idx-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); indicesAdmin().prepareCreate(sourceIndex) @@ -764,7 +764,7 @@ public void testTooManyBytesInFlight() throws IOException { indexer.execute(); } - public void testDownsampleStats() throws IOException { + public void testDownsampleStats() throws Exception { final PersistentTasksService persistentTasksService = mock(PersistentTasksService.class); final DownsampleConfig config = new DownsampleConfig(randomInterval()); final SourceSupplier sourceSupplier = () -> XContentFactory.jsonBuilder() @@ -819,19 +819,6 @@ public void testDownsampleStats() throws IOException { assertDownsampleIndexer(indexService, shardNum, task, executeResponse, task.getTotalShardDocCount()); } - - // Check that metrics get collected as expected. - final TestTelemetryPlugin plugin = getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class) - .findFirst() - .orElseThrow(); - - List measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_SHARD); - assertFalse(measurements.isEmpty()); - for (Measurement measurement : measurements) { - assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); - assertEquals(1, measurement.attributes().size()); - assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); - } } public void testResumeDownsample() throws IOException { @@ -1116,7 +1103,7 @@ private InternalAggregations aggregate(final String index, AggregationBuilder ag } @SuppressWarnings("unchecked") - private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, DownsampleConfig config) throws IOException { + private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, DownsampleConfig config) throws Exception { // Retrieve field information for the metric fields final GetMappingsResponse getMappingsResponse = indicesAdmin().prepareGetMappings(sourceIndex).get(); final Map sourceIndexMappings = getMappingsResponse.mappings() @@ -1175,6 +1162,33 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D .filter(entry -> labelFields.containsKey(entry.getKey())) .toList()); assertEquals(labelFieldDownsampleIndexCloneProperties, labelFieldSourceIndexProperties); + + // Check that metrics get collected as expected. 
+ final TestTelemetryPlugin plugin = getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + + final List latencyShardMetrics = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_SHARD); + assertFalse(latencyShardMetrics.isEmpty()); + for (Measurement measurement : latencyShardMetrics) { + assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); + } + + // Total latency gets recorded after reindex and force-merge complete. + assertBusy(() -> { + final List latencyTotalMetrics = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_TOTAL); + assertFalse(latencyTotalMetrics.isEmpty()); + for (Measurement measurement : latencyTotalMetrics) { + assertTrue( + measurement.value().toString(), + measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000 + ); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration"))); + } + }, 10, TimeUnit.SECONDS); } private void assertDownsampleIndexAggregations( @@ -1497,7 +1511,7 @@ private String createDataStream() throws Exception { return dataStreamName; } - public void testConcurrentDownsample() throws IOException, InterruptedException { + public void testConcurrentDownsample() throws Exception { final DownsampleConfig config = new DownsampleConfig(randomInterval()); SourceSupplier sourceSupplier = () -> { String ts = randomDateForInterval(config.getInterval()); @@ -1576,7 +1590,7 @@ public void testConcurrentDownsample() throws IOException, InterruptedException } } - public void testDuplicateDownsampleRequest() throws IOException, InterruptedException { + public void testDuplicateDownsampleRequest() throws Exception { final DownsampleConfig config = new DownsampleConfig(randomInterval()); SourceSupplier sourceSupplier = () -> { String ts = randomDateForInterval(config.getInterval()); From 80bba1a2eb8f767034278923f4e92f7c7fa5e788 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 3 Apr 2024 09:58:52 +0200 Subject: [PATCH 073/264] ESQL: Use SearchExecutionContext in SearchStats (#106999) This is a lighter dependency than requiring SearchContext. 
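The refactoring pattern here is to accept the narrowest type the class actually reads: SearchStats only ever called getSearchExecutionContext() on each SearchContext, so its constructor can take the narrower type and callers adapt once at the boundary, as the searchExecutionContexts() helper added to ComputeContext in the diff below does. A toy, self-contained sketch of the same move, with invented Heavy/Narrow types standing in for SearchContext/SearchExecutionContext (not Elasticsearch classes):

    import java.util.List;

    record Narrow(String field) {}            // stands in for SearchExecutionContext
    record Heavy(Narrow executionContext) {}  // stands in for SearchContext

    class NarrowDependencyDemo {
        // The consumer declares only what it reads, which also makes it cheaper to construct in tests.
        static int count(List<Narrow> contexts) {
            return contexts.size();
        }

        public static void main(String[] args) {
            List<Heavy> heavy = List.of(new Heavy(new Narrow("f")));
            // Adapt once at the call site, mirroring ComputeContext.searchExecutionContexts():
            List<Narrow> narrow = heavy.stream().map(Heavy::executionContext).toList();
            System.out.println(count(narrow));
        }
    }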
--- .../xpack/esql/planner/PlannerUtils.java | 4 +- .../xpack/esql/plugin/ComputeService.java | 9 ++++- .../xpack/esql/stats/SearchStats.java | 37 +++++++++---------- 3 files changed, 26 insertions(+), 24 deletions(-)
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 1e64a6f136310..206f428bdcb71 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -15,7 +15,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; @@ -139,7 +139,7 @@ public static String[] planOriginalIndices(PhysicalPlan plan) { return indices.toArray(String[]::new); } - public static PhysicalPlan localPlan(List<SearchContext> searchContexts, EsqlConfiguration configuration, PhysicalPlan plan) { + public static PhysicalPlan localPlan(List<SearchExecutionContext> searchContexts, EsqlConfiguration configuration, PhysicalPlan plan) { return localPlan(configuration, plan, new SearchStats(searchContexts)); }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 959783d2f5235..7b38197dde95a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -40,6 +40,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; @@ -433,7 +434,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ); LOGGER.debug("Received physical plan:\n{}", plan); - plan = PlannerUtils.localPlan(context.searchContexts, context.configuration, plan); + plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration, plan); // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation @@ -899,5 +900,9 @@ record ComputeContext( EsqlConfiguration configuration, ExchangeSourceHandler exchangeSource, ExchangeSinkHandler exchangeSink - ) {} + ) { + public List<SearchExecutionContext> searchExecutionContexts() { + return searchContexts.stream().map(ctx -> ctx.getSearchExecutionContext()).toList(); + } + } }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index 679781a40c869..e8c547c55a373 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; @@ -41,7 +41,7 @@ public class SearchStats { - private final List<SearchContext> contexts; + private final List<SearchExecutionContext> contexts; private static class FieldStat { private Long count; @@ -64,7 +64,7 @@ protected boolean removeEldestEntry(Map.Entry<String, FieldStat> eldest) { } }; - public SearchStats(List<SearchContext> contexts) { + public SearchStats(List<SearchExecutionContext> contexts) { this.contexts = contexts; } @@ -106,8 +106,8 @@ public boolean exists(String field) { stat.exists = false; // even if there are deleted documents, check the existence of a field // since if it's missing, deleted documents won't change that - for (SearchContext context : contexts) { - if (context.getSearchExecutionContext().isFieldMapped(field)) { + for (SearchExecutionContext context : contexts) { + if (context.isFieldMapped(field)) { stat.exists = true; break; } @@ -126,9 +126,9 @@ public boolean hasIdenticalDelegate(String field) { var stat = cache.computeIfAbsent(field, s -> new FieldStat()); if (stat.hasIdenticalDelegate == null) { stat.hasIdenticalDelegate = true; - for (SearchContext context : contexts) { - if (context.getSearchExecutionContext().isFieldMapped(field)) { - MappedFieldType type = context.getSearchExecutionContext().getFieldType(field); + for (SearchExecutionContext context : contexts) { + if (context.isFieldMapped(field)) { + MappedFieldType type = context.getFieldType(field); if (type instanceof TextFieldMapper.TextFieldType t) { if (t.canUseSyntheticSourceDelegateForQuerying() == false) { stat.hasIdenticalDelegate = false; @@ -197,9 +197,8 @@ public boolean isSingleValue(String field) { } else { // fields are MV per default var sv = new boolean[] { false }; - for (SearchContext context : contexts) { - var sec = context.getSearchExecutionContext(); - MappedFieldType mappedType = sec.isFieldMapped(field) ? null : sec.getFieldType(field); + for (SearchExecutionContext context : contexts) { + MappedFieldType mappedType = context.isFieldMapped(field) ? null : context.getFieldType(field); if (mappedType != null) { doWithContexts(r -> { sv[0] &= detectSingleValue(r, mappedType, field); @@ -219,10 +218,9 @@ public boolean isRuntimeField(String field) { if (stat.runtime == null) { stat.runtime = false; if (exists(field)) { - for (SearchContext context : contexts) { - var sec = context.getSearchExecutionContext(); - if (sec.isFieldMapped(field)) { - if (sec.getFieldType(field) instanceof AbstractScriptFieldType) { + for (SearchExecutionContext context : contexts) { + if (context.isFieldMapped(field)) { + if (context.getFieldType(field) instanceof AbstractScriptFieldType) { stat.runtime = true; break; } @@ -239,10 +237,9 @@ public boolean isIndexed(String field) { stat.indexed = false; if (exists(field)) { boolean indexed = true; - for (SearchContext context : contexts) { - var sec = context.getSearchExecutionContext(); - if (sec.isFieldMapped(field)) { - if (sec.getFieldType(field).isIndexed() == false) { + for (SearchExecutionContext context : contexts) { + if (context.isFieldMapped(field)) { + if (context.getFieldType(field).isIndexed() == false) { indexed = false; break; } @@ -351,7 +348,7 @@ private interface IndexReaderConsumer { private boolean doWithContexts(IndexReaderConsumer consumer, boolean acceptsDeletions) { try { - for (SearchContext context : contexts) { + for (SearchExecutionContext context : contexts) { for (LeafReaderContext leafContext : context.searcher().getLeafContexts()) { var reader = leafContext.reader(); if (acceptsDeletions == false && reader.hasDeletions()) {
From 99138c141c070170a56a39cc6846ded6841161ca Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 3 Apr 2024 11:14:47 +0300 Subject: [PATCH 074/264] [Failure store] Refactoring data stream lifecycle to facilitate managing failure store. (#106981) In this PR we refactor data stream and data stream lifecycle related code. The goal of this small refactoring is to make it easier to extend DSL to manage the failure store: - The retrieval of backing indices past the retention period. - The rollover execution during the DSL run, to handle more than one write index. --- .../lifecycle/DataStreamLifecycleService.java | 96 ++++++++++--------- .../cluster/metadata/DataStream.java | 37 ++++--- 2 files changed, 72 insertions(+), 61 deletions(-)
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 52753f00a39c1..8049363f26799 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -343,35 +343,15 @@ void run(ClusterState state) { continue; } - /* - * This is the pre-rollover write index. It may or may not be the write index after maybeExecuteRollover has executed, depending - * on rollover criteria. We're keeping a reference to it because regardless of whether it's rolled over or not we want to - * exclude it from force merging later in this data stream lifecycle run.
- */ - Index currentRunWriteIndex = dataStream.getWriteIndex(); - try { - maybeExecuteRollover(state, dataStream); - } catch (Exception e) { - logger.error( - () -> String.format(Locale.ROOT, "Data stream lifecycle failed to rollover data stream [%s]", dataStream.getName()), - e - ); - DataStream latestDataStream = clusterService.state().metadata().dataStreams().get(dataStream.getName()); - if (latestDataStream != null) { - if (latestDataStream.getWriteIndex().getName().equals(currentRunWriteIndex.getName())) { - // data stream has not been rolled over in the meantime so record the error against the write index we - // attempted the rollover - errorStore.recordError(currentRunWriteIndex.getName(), e); - } - } - } - + // the following indices should not be considered for the remainder of this service run, for various reasons. Set<Index> indicesToExcludeForRemainingRun = new HashSet<>(); - // the following indices should not be considered for the remainder of this service run: - // 1) the write index as it's still getting writes and we'll have to roll it over when the conditions are met - // 2) tsds indices that are still within their time bounds (i.e. now < time_series.end_time) - we don't want these indices to be + + // This is the pre-rollover write index. It may or may not be the write index after maybeExecuteRollover has executed, + // depending on rollover criteria, for this reason we exclude it for the remaining run. + indicesToExcludeForRemainingRun.addAll(maybeExecuteRollover(state, dataStream)); + + // tsds indices that are still within their time bounds (i.e. now < time_series.end_time) - we don't want these indices to be // deleted, forcemerged, or downsampled as they're still expected to receive large amounts of writes - indicesToExcludeForRemainingRun.add(currentRunWriteIndex); indicesToExcludeForRemainingRun.addAll( timeSeriesIndicesStillWithinTimeBounds( state.metadata(), @@ -791,26 +771,50 @@ private void clearErrorStoreForUnmanagedIndices(DataStream dataStream) { } } - private void maybeExecuteRollover(ClusterState state, DataStream dataStream) { - Index writeIndex = dataStream.getWriteIndex(); - if (dataStream.isIndexManagedByDataStreamLifecycle(writeIndex, state.metadata()::index)) { - RolloverRequest rolloverRequest = getDefaultRolloverRequest( - rolloverConfiguration, - dataStream.getName(), - dataStream.getLifecycle().getEffectiveDataRetention(DataStreamGlobalRetention.getFromClusterState(state)) - ); - transportActionsDeduplicator.executeOnce( - rolloverRequest, - new ErrorRecordingActionListener( - RolloverAction.NAME, - writeIndex.getName(), - errorStore, - Strings.format("Data stream lifecycle encountered an error trying to rollover data steam [%s]", dataStream.getName()), - signallingErrorRetryInterval - ), - (req, reqListener) -> rolloverDataStream(writeIndex.getName(), rolloverRequest, reqListener) + /** + * This method will attempt to rollover the write index of a data stream. The rollover will occur only if the conditions + * apply. In any case, we return the write backing index back to the caller, so it can be excluded from the next steps.
+ * @return the write index of this data stream before rollover was requested.
+ */ + private Set<Index> maybeExecuteRollover(ClusterState state, DataStream dataStream) { + Index currentRunWriteIndex = dataStream.getWriteIndex(); + try { + if (dataStream.isIndexManagedByDataStreamLifecycle(currentRunWriteIndex, state.metadata()::index)) { + RolloverRequest rolloverRequest = getDefaultRolloverRequest( + rolloverConfiguration, + dataStream.getName(), + dataStream.getLifecycle().getEffectiveDataRetention(DataStreamGlobalRetention.getFromClusterState(state)) + ); + transportActionsDeduplicator.executeOnce( + rolloverRequest, + new ErrorRecordingActionListener( + RolloverAction.NAME, + currentRunWriteIndex.getName(), + errorStore, + Strings.format( + "Data stream lifecycle encountered an error trying to rollover data stream [%s]", + dataStream.getName() + ), + signallingErrorRetryInterval + ), + (req, reqListener) -> rolloverDataStream(currentRunWriteIndex.getName(), rolloverRequest, reqListener) + ); + } + } catch (Exception e) { + logger.error( + () -> String.format(Locale.ROOT, "Data stream lifecycle failed to rollover data stream [%s]", dataStream.getName()), + e ); + DataStream latestDataStream = clusterService.state().metadata().dataStreams().get(dataStream.getName()); + if (latestDataStream != null) { + if (latestDataStream.getWriteIndex().getName().equals(currentRunWriteIndex.getName())) { + // data stream has not been rolled over in the meantime so record the error against the write index we + // attempted the rollover + errorStore.recordError(currentRunWriteIndex.getName(), e); + } + } } + return Set.of(currentRunWriteIndex); } /** @@ -818,7 +822,7 @@ * it has sent delete requests for. * * @param state The cluster state from which to get index metadata - * @param dataStream The datastream + * @param dataStream The data stream * @param indicesToExcludeForRemainingRun Indices to exclude from retention even if it would be time for them to be deleted * @return The set of indices that delete requests have been sent for */
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index f1a508b803d4d..57ab7c431f7ea 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -883,32 +883,39 @@ public List getDownsamplingRoundsFor( * the backing indices that are managed by the data stream lifecycle".
*/ public List<Index> getNonWriteIndicesOlderThan( - TimeValue age, + TimeValue retentionPeriod, Function<String, IndexMetadata> indexMetadataSupplier, @Nullable Predicate<IndexMetadata> indicesPredicate, LongSupplier nowSupplier ) { List<Index> olderIndices = new ArrayList<>(); for (Index index : indices) { - IndexMetadata indexMetadata = indexMetadataSupplier.apply(index.getName()); - if (indexMetadata == null) { - // we would normally throw exception in a situation like this however, this is meant to be a helper method - // so let's ignore deleted indices - continue; - } - TimeValue indexLifecycleDate = getGenerationLifecycleDate(indexMetadata); - if (indexLifecycleDate != null) { - long nowMillis = nowSupplier.getAsLong(); - if (nowMillis >= indexLifecycleDate.getMillis() + age.getMillis()) { - if (indicesPredicate == null || indicesPredicate.test(indexMetadata)) { - olderIndices.add(index); - } - } + if (isIndexOlderThan(index, retentionPeriod.getMillis(), nowSupplier.getAsLong(), indicesPredicate, indexMetadataSupplier)) { + olderIndices.add(index); } } return olderIndices; } + private boolean isIndexOlderThan( + Index index, + long retentionPeriod, + long now, + Predicate<IndexMetadata> indicesPredicate, + Function<String, IndexMetadata> indexMetadataSupplier + ) { + IndexMetadata indexMetadata = indexMetadataSupplier.apply(index.getName()); + if (indexMetadata == null) { + // we would normally throw exception in a situation like this however, this is meant to be a helper method + // so let's ignore deleted indices + return false; + } + TimeValue indexLifecycleDate = getGenerationLifecycleDate(indexMetadata); + return indexLifecycleDate != null + && now >= indexLifecycleDate.getMillis() + retentionPeriod + && (indicesPredicate == null || indicesPredicate.test(indexMetadata)); + } + /** * Checks if the provided backing index is managed by the data stream lifecycle as part of this data stream.
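As a usage sketch only (the retention value and the metadata variable are assumptions for illustration, not part of this patch), a caller of the refactored helper could collect expired backing indices like this:

    // Collect backing indices whose generation ended longer ago than the retention
    // period, so the lifecycle run can issue delete requests for them.
    List<Index> expired = dataStream.getNonWriteIndicesOlderThan(
        TimeValue.timeValueDays(30),   // retentionPeriod (assumed value)
        metadata::index,               // indexMetadataSupplier backed by cluster metadata
        null,                          // indicesPredicate: no additional filtering
        System::currentTimeMillis      // nowSupplier
    );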
* If the index is not a backing index of this data stream, or we cannot supply its metadata From 573c03262f9a178a0b769d07d342c0643bb39555 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 3 Apr 2024 10:54:49 +0200 Subject: [PATCH 075/264] [Docs] Fix CCS matrix for 8.13 (#107028) --- .../ccs-version-compat-matrix.asciidoc | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc index 5af3c997251dd..c1d279d3163e8 100644 --- a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc +++ b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc @@ -2,23 +2,22 @@ |==== | 17+^h| Remote cluster version h| Local cluster version - | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 |8.13 + | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 | 8.13 | 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} -| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.7 
| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} -| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} -|==== - +| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon} | {yes-icon} +|==== \ No newline at 
end of file From 0c493e430bb6c8b1d04e75fad415088136c85f21 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 3 Apr 2024 11:21:06 +0200 Subject: [PATCH 076/264] Make requestSize final in SearchExecutionContext (#107026) --- .../org/elasticsearch/index/query/SearchExecutionContext.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 86af6d21b7a09..638a04fb2e47d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -74,7 +74,6 @@ import java.util.function.Predicate; import static org.elasticsearch.index.IndexService.parseRuntimeMappings; -import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; /** * The context used to execute a search request on a shard. It provides access @@ -102,7 +101,7 @@ public class SearchExecutionContext extends QueryRewriteContext { private QueryBuilder aliasFilter; private boolean rewriteToNamedQueries = false; - private Integer requestSize = DEFAULT_SIZE; + private final Integer requestSize; /** * Build a {@linkplain SearchExecutionContext}.
From 331d7831ff796b5c09e69a71d3fd56666f609690 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 3 Apr 2024 11:30:14 +0200 Subject: [PATCH 077/264] [DOCS] Fixes search request in semantic search tutorial. (#107034) --- .../tab-widgets/inference-api/infer-api-search.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc index 843c351648c63..1e8470471491f 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc @@ -8,7 +8,7 @@ GET cohere-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "inference_id": "cohere_embeddings", + "model_id": "cohere_embeddings", "model_text": "Muscles in human body" } }, @@ -83,7 +83,7 @@ GET openai-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "inference_id": "openai_embeddings", + "model_id": "openai_embeddings", "model_text": "Calculate fuel cost" } },
From a23e29cdf9fb71a71f4254bde4578944bc5ace2e Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 3 Apr 2024 20:34:58 +1100 Subject: [PATCH 078/264] [Test] Explicitly specify number of shards to 1 (#107032) The test assumes the number of shards defaults to 1, which may not be true in certain deployment types. This PR makes the test explicitly configure it to 1.
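A minimal sketch of the resulting pattern, assuming the low-level REST client these integration tests use (the index name and request variable are illustrative):

    // Pin the shard count at index-creation time instead of relying on the cluster
    // default, which some deployment types override.
    Request createIndex = new Request("PUT", "/test1");
    createIndex.setJsonEntity("""
        {
          "settings": { "number_of_shards": 1 },
          "aliases": { "test": {} }
        }""");
    client().performRequest(createIndex);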
Relates: #106707 --- .../xpack/sql/qa/single_node/JdbcShardFailureIT.java | 3 +++ 1 file changed, 3 insertions(+)
diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java index eace1b5ad1ced..dc9989b26c3b2 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java @@ -99,6 +99,9 @@ public void testAllowPartialSearchResults() throws Exception { "aliases": { "test": {} }, + "settings": { + "number_of_shards": 1 + }, "mappings": { "properties": { "bool": {
From aa03bc8017540100a67bbe6a65a5a9cb92db52e0 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 3 Apr 2024 13:30:09 +0200 Subject: [PATCH 079/264] ESQL: Swap `FROM` `METADATA` and `OPTIONS` directives (#107012) This swaps the two clauses, foreseeing more frequent use of the METADATA clause than of OPTIONS. --- .../plugin/esql/src/main/antlr/EsqlBaseParser.g4 | 2 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 2 +- .../xpack/esql/parser/EsqlBaseLexer.java | 2 +- .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 14 +++++++------- .../xpack/esql/parser/StatementParserTests.java | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 06a15adb3ecbe..9f700f3905111 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -99,7 +99,7 @@ field ; fromCommand - : FROM fromIdentifier (COMMA fromIdentifier)* fromOptions? metadata? + : FROM fromIdentifier (COMMA fromIdentifier)* metadata? fromOptions?
; fromIdentifier diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 0a115745c0b23..b77d229cb5b9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -311,7 +311,7 @@ FROM_OPENING_BRACKET FROM_CLOSING_BRACKET FROM_COMMA FROM_ASSIGN -FROM_QUTED_STRING +FROM_QUOTED_STRING OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER_PART diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 358bb431e468e..23beb8c26be5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -68,7 +68,7 @@ private static String[] makeRuleNames() { "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", - "FROM_QUTED_STRING", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", + "FROM_QUOTED_STRING", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index ff5de4e348db0..0e3df1df3978e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -279,4 +279,4 @@ enrichWithClause atn: -[4, 1, 109, 530, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 116, 8, 1, 10, 1, 12, 1, 119, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 126, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 141, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 153, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 160, 8, 5, 10, 5, 12, 5, 163, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 3, 5, 174, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 196, 8, 6, 1, 6, 1, 6, 
1, 6, 3, 6, 201, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 208, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 214, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 222, 8, 8, 10, 8, 12, 8, 225, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 234, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 242, 8, 10, 10, 10, 12, 10, 245, 9, 10, 3, 10, 247, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 257, 8, 12, 10, 12, 12, 12, 260, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 267, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 273, 8, 14, 10, 14, 12, 14, 276, 9, 14, 1, 14, 3, 14, 279, 8, 14, 1, 14, 3, 14, 282, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 290, 8, 16, 10, 16, 12, 16, 293, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 3, 18, 301, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 307, 8, 19, 10, 19, 12, 19, 310, 9, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 321, 8, 22, 1, 22, 1, 22, 3, 22, 325, 8, 22, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 331, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 366, 8, 28, 10, 28, 12, 28, 369, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 377, 8, 28, 10, 28, 12, 28, 380, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 388, 8, 28, 10, 28, 12, 28, 391, 9, 28, 1, 28, 1, 28, 3, 28, 395, 8, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 3, 31, 411, 8, 31, 1, 31, 1, 31, 3, 31, 415, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 421, 8, 32, 10, 32, 12, 32, 424, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 430, 8, 33, 10, 33, 12, 33, 433, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 452, 8, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 5, 39, 464, 8, 39, 10, 39, 12, 39, 467, 9, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 3, 42, 477, 8, 42, 1, 43, 3, 43, 480, 8, 43, 1, 43, 1, 43, 1, 44, 3, 44, 485, 8, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 510, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 516, 8, 51, 10, 51, 12, 51, 519, 9, 51, 3, 51, 521, 8, 51, 1, 52, 1, 52, 1, 52, 3, 52, 526, 8, 52, 1, 52, 1, 52, 1, 52, 0, 3, 2, 10, 16, 53, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 73, 73, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 555, 0, 106, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 4, 125, 1, 0, 0, 0, 6, 140, 1, 0, 0, 0, 8, 142, 1, 0, 0, 0, 10, 173, 1, 0, 0, 0, 12, 200, 1, 0, 0, 0, 14, 207, 1, 0, 0, 0, 16, 213, 1, 0, 0, 0, 18, 233, 1, 0, 0, 0, 20, 235, 1, 0, 0, 0, 22, 250, 1, 0, 0, 0, 24, 253, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 268, 1, 0, 0, 0, 30, 283, 1, 0, 0, 0, 32, 285, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 300, 1, 0, 0, 0, 38, 302, 1, 0, 0, 0, 40, 311, 1, 0, 0, 0, 42, 315, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 332, 1, 0, 0, 0, 50, 340, 1, 0, 0, 0, 52, 348, 1, 0, 0, 0, 54, 350, 1, 
0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 396, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 425, 1, 0, 0, 0, 68, 434, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 472, 1, 0, 0, 0, 84, 476, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 484, 1, 0, 0, 0, 90, 488, 1, 0, 0, 0, 92, 490, 1, 0, 0, 0, 94, 492, 1, 0, 0, 0, 96, 495, 1, 0, 0, 0, 98, 499, 1, 0, 0, 0, 100, 502, 1, 0, 0, 0, 102, 505, 1, 0, 0, 0, 104, 525, 1, 0, 0, 0, 106, 107, 3, 2, 1, 0, 107, 108, 5, 0, 0, 1, 108, 1, 1, 0, 0, 0, 109, 110, 6, 1, -1, 0, 110, 111, 3, 4, 2, 0, 111, 117, 1, 0, 0, 0, 112, 113, 10, 1, 0, 0, 113, 114, 5, 26, 0, 0, 114, 116, 3, 6, 3, 0, 115, 112, 1, 0, 0, 0, 116, 119, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 3, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 120, 126, 3, 94, 47, 0, 121, 126, 3, 28, 14, 0, 122, 126, 3, 22, 11, 0, 123, 126, 3, 98, 49, 0, 124, 126, 3, 100, 50, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 5, 1, 0, 0, 0, 127, 141, 3, 42, 21, 0, 128, 141, 3, 46, 23, 0, 129, 141, 3, 58, 29, 0, 130, 141, 3, 64, 32, 0, 131, 141, 3, 60, 30, 0, 132, 141, 3, 44, 22, 0, 133, 141, 3, 8, 4, 0, 134, 141, 3, 66, 33, 0, 135, 141, 3, 68, 34, 0, 136, 141, 3, 72, 36, 0, 137, 141, 3, 74, 37, 0, 138, 141, 3, 102, 51, 0, 139, 141, 3, 76, 38, 0, 140, 127, 1, 0, 0, 0, 140, 128, 1, 0, 0, 0, 140, 129, 1, 0, 0, 0, 140, 130, 1, 0, 0, 0, 140, 131, 1, 0, 0, 0, 140, 132, 1, 0, 0, 0, 140, 133, 1, 0, 0, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 140, 139, 1, 0, 0, 0, 141, 7, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 10, 5, 0, 144, 9, 1, 0, 0, 0, 145, 146, 6, 5, -1, 0, 146, 147, 5, 44, 0, 0, 147, 174, 3, 10, 5, 7, 148, 174, 3, 14, 7, 0, 149, 174, 3, 12, 6, 0, 150, 152, 3, 14, 7, 0, 151, 153, 5, 44, 0, 0, 152, 151, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 155, 5, 41, 0, 0, 155, 156, 5, 40, 0, 0, 156, 161, 3, 14, 7, 0, 157, 158, 5, 34, 0, 0, 158, 160, 3, 14, 7, 0, 159, 157, 1, 0, 0, 0, 160, 163, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 164, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 165, 5, 50, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 3, 14, 7, 0, 167, 169, 5, 42, 0, 0, 168, 170, 5, 44, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 5, 45, 0, 0, 172, 174, 1, 0, 0, 0, 173, 145, 1, 0, 0, 0, 173, 148, 1, 0, 0, 0, 173, 149, 1, 0, 0, 0, 173, 150, 1, 0, 0, 0, 173, 166, 1, 0, 0, 0, 174, 183, 1, 0, 0, 0, 175, 176, 10, 4, 0, 0, 176, 177, 5, 31, 0, 0, 177, 182, 3, 10, 5, 5, 178, 179, 10, 3, 0, 0, 179, 180, 5, 47, 0, 0, 180, 182, 3, 10, 5, 4, 181, 175, 1, 0, 0, 0, 181, 178, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 11, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 44, 0, 0, 188, 187, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 43, 0, 0, 191, 192, 3, 90, 45, 0, 192, 201, 1, 0, 0, 0, 193, 195, 3, 14, 7, 0, 194, 196, 5, 44, 0, 0, 195, 194, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 198, 5, 49, 0, 0, 198, 199, 3, 90, 45, 0, 199, 201, 1, 0, 0, 0, 200, 186, 1, 0, 0, 0, 200, 193, 1, 0, 0, 0, 201, 13, 1, 0, 0, 0, 202, 208, 3, 16, 8, 0, 203, 204, 3, 16, 8, 0, 204, 205, 3, 92, 46, 0, 205, 206, 3, 16, 8, 0, 206, 208, 1, 0, 0, 0, 207, 202, 1, 0, 0, 0, 207, 203, 1, 0, 0, 0, 208, 15, 1, 0, 0, 0, 209, 210, 6, 8, -1, 0, 
210, 214, 3, 18, 9, 0, 211, 212, 7, 0, 0, 0, 212, 214, 3, 16, 8, 3, 213, 209, 1, 0, 0, 0, 213, 211, 1, 0, 0, 0, 214, 223, 1, 0, 0, 0, 215, 216, 10, 2, 0, 0, 216, 217, 7, 1, 0, 0, 217, 222, 3, 16, 8, 3, 218, 219, 10, 1, 0, 0, 219, 220, 7, 0, 0, 0, 220, 222, 3, 16, 8, 2, 221, 215, 1, 0, 0, 0, 221, 218, 1, 0, 0, 0, 222, 225, 1, 0, 0, 0, 223, 221, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 17, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 226, 234, 3, 56, 28, 0, 227, 234, 3, 48, 24, 0, 228, 234, 3, 20, 10, 0, 229, 230, 5, 40, 0, 0, 230, 231, 3, 10, 5, 0, 231, 232, 5, 50, 0, 0, 232, 234, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 233, 227, 1, 0, 0, 0, 233, 228, 1, 0, 0, 0, 233, 229, 1, 0, 0, 0, 234, 19, 1, 0, 0, 0, 235, 236, 3, 52, 26, 0, 236, 246, 5, 40, 0, 0, 237, 247, 5, 61, 0, 0, 238, 243, 3, 10, 5, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 10, 5, 0, 241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 237, 1, 0, 0, 0, 246, 238, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 5, 50, 0, 0, 249, 21, 1, 0, 0, 0, 250, 251, 5, 14, 0, 0, 251, 252, 3, 24, 12, 0, 252, 23, 1, 0, 0, 0, 253, 258, 3, 26, 13, 0, 254, 255, 5, 34, 0, 0, 255, 257, 3, 26, 13, 0, 256, 254, 1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 25, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 267, 3, 10, 5, 0, 262, 263, 3, 48, 24, 0, 263, 264, 5, 33, 0, 0, 264, 265, 3, 10, 5, 0, 265, 267, 1, 0, 0, 0, 266, 261, 1, 0, 0, 0, 266, 262, 1, 0, 0, 0, 267, 27, 1, 0, 0, 0, 268, 269, 5, 6, 0, 0, 269, 274, 3, 30, 15, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 30, 15, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 279, 3, 32, 16, 0, 278, 277, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 282, 3, 36, 18, 0, 281, 280, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 29, 1, 0, 0, 0, 283, 284, 7, 2, 0, 0, 284, 31, 1, 0, 0, 0, 285, 286, 5, 71, 0, 0, 286, 291, 3, 34, 17, 0, 287, 288, 5, 34, 0, 0, 288, 290, 3, 34, 17, 0, 289, 287, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 33, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 294, 295, 3, 90, 45, 0, 295, 296, 5, 33, 0, 0, 296, 297, 3, 90, 45, 0, 297, 35, 1, 0, 0, 0, 298, 301, 3, 38, 19, 0, 299, 301, 3, 40, 20, 0, 300, 298, 1, 0, 0, 0, 300, 299, 1, 0, 0, 0, 301, 37, 1, 0, 0, 0, 302, 303, 5, 72, 0, 0, 303, 308, 3, 30, 15, 0, 304, 305, 5, 34, 0, 0, 305, 307, 3, 30, 15, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 39, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 5, 64, 0, 0, 312, 313, 3, 38, 19, 0, 313, 314, 5, 65, 0, 0, 314, 41, 1, 0, 0, 0, 315, 316, 5, 4, 0, 0, 316, 317, 3, 24, 12, 0, 317, 43, 1, 0, 0, 0, 318, 320, 5, 17, 0, 0, 319, 321, 3, 24, 12, 0, 320, 319, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 324, 1, 0, 0, 0, 322, 323, 5, 30, 0, 0, 323, 325, 3, 24, 12, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 45, 1, 0, 0, 0, 326, 327, 5, 8, 0, 0, 327, 330, 3, 24, 12, 0, 328, 329, 5, 30, 0, 0, 329, 331, 3, 24, 12, 0, 330, 328, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 47, 1, 0, 0, 0, 332, 337, 3, 52, 26, 0, 333, 334, 5, 36, 0, 0, 334, 336, 3, 52, 26, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 49, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 345, 3, 54, 27, 0, 341, 342, 5, 36, 0, 0, 342, 344, 3, 54, 27, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 
346, 51, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 7, 3, 0, 0, 349, 53, 1, 0, 0, 0, 350, 351, 5, 77, 0, 0, 351, 55, 1, 0, 0, 0, 352, 395, 5, 45, 0, 0, 353, 354, 3, 88, 44, 0, 354, 355, 5, 66, 0, 0, 355, 395, 1, 0, 0, 0, 356, 395, 3, 86, 43, 0, 357, 395, 3, 88, 44, 0, 358, 395, 3, 82, 41, 0, 359, 395, 5, 48, 0, 0, 360, 395, 3, 90, 45, 0, 361, 362, 5, 64, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 65, 0, 0, 371, 395, 1, 0, 0, 0, 372, 373, 5, 64, 0, 0, 373, 378, 3, 82, 41, 0, 374, 375, 5, 34, 0, 0, 375, 377, 3, 82, 41, 0, 376, 374, 1, 0, 0, 0, 377, 380, 1, 0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 381, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 381, 382, 5, 65, 0, 0, 382, 395, 1, 0, 0, 0, 383, 384, 5, 64, 0, 0, 384, 389, 3, 90, 45, 0, 385, 386, 5, 34, 0, 0, 386, 388, 3, 90, 45, 0, 387, 385, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 392, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 392, 393, 5, 65, 0, 0, 393, 395, 1, 0, 0, 0, 394, 352, 1, 0, 0, 0, 394, 353, 1, 0, 0, 0, 394, 356, 1, 0, 0, 0, 394, 357, 1, 0, 0, 0, 394, 358, 1, 0, 0, 0, 394, 359, 1, 0, 0, 0, 394, 360, 1, 0, 0, 0, 394, 361, 1, 0, 0, 0, 394, 372, 1, 0, 0, 0, 394, 383, 1, 0, 0, 0, 395, 57, 1, 0, 0, 0, 396, 397, 5, 10, 0, 0, 397, 398, 5, 28, 0, 0, 398, 59, 1, 0, 0, 0, 399, 400, 5, 16, 0, 0, 400, 405, 3, 62, 31, 0, 401, 402, 5, 34, 0, 0, 402, 404, 3, 62, 31, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 61, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 410, 3, 10, 5, 0, 409, 411, 7, 4, 0, 0, 410, 409, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 413, 5, 46, 0, 0, 413, 415, 7, 5, 0, 0, 414, 412, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 63, 1, 0, 0, 0, 416, 417, 5, 9, 0, 0, 417, 422, 3, 50, 25, 0, 418, 419, 5, 34, 0, 0, 419, 421, 3, 50, 25, 0, 420, 418, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 65, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 425, 426, 5, 2, 0, 0, 426, 431, 3, 50, 25, 0, 427, 428, 5, 34, 0, 0, 428, 430, 3, 50, 25, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 67, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 13, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 50, 25, 0, 444, 445, 5, 81, 0, 0, 445, 446, 3, 50, 25, 0, 446, 71, 1, 0, 0, 0, 447, 448, 5, 1, 0, 0, 448, 449, 3, 18, 9, 0, 449, 451, 3, 90, 45, 0, 450, 452, 3, 78, 39, 0, 451, 450, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 73, 1, 0, 0, 0, 453, 454, 5, 7, 0, 0, 454, 455, 3, 18, 9, 0, 455, 456, 3, 90, 45, 0, 456, 75, 1, 0, 0, 0, 457, 458, 5, 12, 0, 0, 458, 459, 3, 48, 24, 0, 459, 77, 1, 0, 0, 0, 460, 465, 3, 80, 40, 0, 461, 462, 5, 34, 0, 0, 462, 464, 3, 80, 40, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 469, 3, 52, 26, 0, 469, 470, 5, 33, 0, 0, 470, 471, 3, 56, 28, 0, 471, 81, 1, 0, 0, 0, 472, 473, 7, 6, 0, 0, 473, 83, 1, 0, 0, 0, 474, 477, 3, 86, 43, 0, 475, 477, 3, 88, 44, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 85, 1, 0, 0, 0, 478, 480, 7, 0, 0, 0, 479, 478, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 5, 29, 0, 0, 482, 
87, 1, 0, 0, 0, 483, 485, 7, 0, 0, 0, 484, 483, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 5, 28, 0, 0, 487, 89, 1, 0, 0, 0, 488, 489, 5, 27, 0, 0, 489, 91, 1, 0, 0, 0, 490, 491, 7, 7, 0, 0, 491, 93, 1, 0, 0, 0, 492, 493, 5, 5, 0, 0, 493, 494, 3, 96, 48, 0, 494, 95, 1, 0, 0, 0, 495, 496, 5, 64, 0, 0, 496, 497, 3, 2, 1, 0, 497, 498, 5, 65, 0, 0, 498, 97, 1, 0, 0, 0, 499, 500, 5, 15, 0, 0, 500, 501, 5, 97, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 5, 11, 0, 0, 503, 504, 5, 101, 0, 0, 504, 101, 1, 0, 0, 0, 505, 506, 5, 3, 0, 0, 506, 509, 5, 87, 0, 0, 507, 508, 5, 85, 0, 0, 508, 510, 3, 50, 25, 0, 509, 507, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 520, 1, 0, 0, 0, 511, 512, 5, 86, 0, 0, 512, 517, 3, 104, 52, 0, 513, 514, 5, 34, 0, 0, 514, 516, 3, 104, 52, 0, 515, 513, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 521, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 511, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 103, 1, 0, 0, 0, 522, 523, 3, 50, 25, 0, 523, 524, 5, 33, 0, 0, 524, 526, 1, 0, 0, 0, 525, 522, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 3, 50, 25, 0, 528, 105, 1, 0, 0, 0, 51, 117, 125, 140, 152, 161, 169, 173, 181, 183, 188, 195, 200, 207, 213, 221, 223, 233, 243, 246, 258, 266, 274, 278, 281, 291, 300, 308, 320, 324, 330, 337, 345, 367, 378, 389, 394, 405, 410, 414, 422, 431, 440, 451, 465, 476, 479, 484, 509, 517, 520, 525] \ No newline at end of file +[4, 1, 109, 530, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 116, 8, 1, 10, 1, 12, 1, 119, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 126, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 141, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 153, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 160, 8, 5, 10, 5, 12, 5, 163, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 3, 5, 174, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 196, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 208, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 214, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 222, 8, 8, 10, 8, 12, 8, 225, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 234, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 242, 8, 10, 10, 10, 12, 10, 245, 9, 10, 3, 10, 247, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 257, 8, 12, 10, 12, 12, 12, 260, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 267, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 273, 8, 14, 10, 14, 12, 14, 276, 9, 14, 1, 14, 3, 14, 279, 8, 14, 1, 14, 3, 14, 282, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 290, 8, 16, 10, 16, 12, 16, 293, 9, 16, 1, 17, 1, 
17, 1, 17, 1, 17, 1, 18, 1, 18, 3, 18, 301, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 307, 8, 19, 10, 19, 12, 19, 310, 9, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 321, 8, 22, 1, 22, 1, 22, 3, 22, 325, 8, 22, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 331, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 366, 8, 28, 10, 28, 12, 28, 369, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 377, 8, 28, 10, 28, 12, 28, 380, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 388, 8, 28, 10, 28, 12, 28, 391, 9, 28, 1, 28, 1, 28, 3, 28, 395, 8, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 3, 31, 411, 8, 31, 1, 31, 1, 31, 3, 31, 415, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 421, 8, 32, 10, 32, 12, 32, 424, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 430, 8, 33, 10, 33, 12, 33, 433, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 452, 8, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 5, 39, 464, 8, 39, 10, 39, 12, 39, 467, 9, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 3, 42, 477, 8, 42, 1, 43, 3, 43, 480, 8, 43, 1, 43, 1, 43, 1, 44, 3, 44, 485, 8, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 510, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 516, 8, 51, 10, 51, 12, 51, 519, 9, 51, 3, 51, 521, 8, 51, 1, 52, 1, 52, 1, 52, 3, 52, 526, 8, 52, 1, 52, 1, 52, 1, 52, 0, 3, 2, 10, 16, 53, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 73, 73, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 555, 0, 106, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 4, 125, 1, 0, 0, 0, 6, 140, 1, 0, 0, 0, 8, 142, 1, 0, 0, 0, 10, 173, 1, 0, 0, 0, 12, 200, 1, 0, 0, 0, 14, 207, 1, 0, 0, 0, 16, 213, 1, 0, 0, 0, 18, 233, 1, 0, 0, 0, 20, 235, 1, 0, 0, 0, 22, 250, 1, 0, 0, 0, 24, 253, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 268, 1, 0, 0, 0, 30, 283, 1, 0, 0, 0, 32, 285, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 300, 1, 0, 0, 0, 38, 302, 1, 0, 0, 0, 40, 311, 1, 0, 0, 0, 42, 315, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 332, 1, 0, 0, 0, 50, 340, 1, 0, 0, 0, 52, 348, 1, 0, 0, 0, 54, 350, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 396, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 425, 1, 0, 0, 0, 68, 434, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 472, 1, 0, 0, 0, 84, 476, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 484, 1, 0, 0, 0, 90, 488, 1, 0, 0, 0, 92, 490, 1, 0, 0, 0, 94, 492, 1, 0, 0, 0, 96, 495, 1, 0, 0, 0, 98, 499, 1, 0, 0, 0, 100, 502, 1, 0, 0, 0, 102, 505, 1, 0, 0, 0, 104, 525, 1, 0, 0, 0, 106, 107, 3, 2, 1, 0, 107, 108, 5, 0, 0, 1, 108, 1, 1, 0, 0, 0, 109, 110, 6, 1, -1, 0, 110, 111, 3, 4, 2, 0, 111, 117, 1, 0, 0, 0, 112, 113, 10, 1, 0, 0, 113, 114, 5, 26, 0, 0, 114, 116, 3, 6, 3, 0, 
115, 112, 1, 0, 0, 0, 116, 119, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 3, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 120, 126, 3, 94, 47, 0, 121, 126, 3, 28, 14, 0, 122, 126, 3, 22, 11, 0, 123, 126, 3, 98, 49, 0, 124, 126, 3, 100, 50, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 5, 1, 0, 0, 0, 127, 141, 3, 42, 21, 0, 128, 141, 3, 46, 23, 0, 129, 141, 3, 58, 29, 0, 130, 141, 3, 64, 32, 0, 131, 141, 3, 60, 30, 0, 132, 141, 3, 44, 22, 0, 133, 141, 3, 8, 4, 0, 134, 141, 3, 66, 33, 0, 135, 141, 3, 68, 34, 0, 136, 141, 3, 72, 36, 0, 137, 141, 3, 74, 37, 0, 138, 141, 3, 102, 51, 0, 139, 141, 3, 76, 38, 0, 140, 127, 1, 0, 0, 0, 140, 128, 1, 0, 0, 0, 140, 129, 1, 0, 0, 0, 140, 130, 1, 0, 0, 0, 140, 131, 1, 0, 0, 0, 140, 132, 1, 0, 0, 0, 140, 133, 1, 0, 0, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 140, 139, 1, 0, 0, 0, 141, 7, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 10, 5, 0, 144, 9, 1, 0, 0, 0, 145, 146, 6, 5, -1, 0, 146, 147, 5, 44, 0, 0, 147, 174, 3, 10, 5, 7, 148, 174, 3, 14, 7, 0, 149, 174, 3, 12, 6, 0, 150, 152, 3, 14, 7, 0, 151, 153, 5, 44, 0, 0, 152, 151, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 155, 5, 41, 0, 0, 155, 156, 5, 40, 0, 0, 156, 161, 3, 14, 7, 0, 157, 158, 5, 34, 0, 0, 158, 160, 3, 14, 7, 0, 159, 157, 1, 0, 0, 0, 160, 163, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 164, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 165, 5, 50, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 3, 14, 7, 0, 167, 169, 5, 42, 0, 0, 168, 170, 5, 44, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 5, 45, 0, 0, 172, 174, 1, 0, 0, 0, 173, 145, 1, 0, 0, 0, 173, 148, 1, 0, 0, 0, 173, 149, 1, 0, 0, 0, 173, 150, 1, 0, 0, 0, 173, 166, 1, 0, 0, 0, 174, 183, 1, 0, 0, 0, 175, 176, 10, 4, 0, 0, 176, 177, 5, 31, 0, 0, 177, 182, 3, 10, 5, 5, 178, 179, 10, 3, 0, 0, 179, 180, 5, 47, 0, 0, 180, 182, 3, 10, 5, 4, 181, 175, 1, 0, 0, 0, 181, 178, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 11, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 44, 0, 0, 188, 187, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 43, 0, 0, 191, 192, 3, 90, 45, 0, 192, 201, 1, 0, 0, 0, 193, 195, 3, 14, 7, 0, 194, 196, 5, 44, 0, 0, 195, 194, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 198, 5, 49, 0, 0, 198, 199, 3, 90, 45, 0, 199, 201, 1, 0, 0, 0, 200, 186, 1, 0, 0, 0, 200, 193, 1, 0, 0, 0, 201, 13, 1, 0, 0, 0, 202, 208, 3, 16, 8, 0, 203, 204, 3, 16, 8, 0, 204, 205, 3, 92, 46, 0, 205, 206, 3, 16, 8, 0, 206, 208, 1, 0, 0, 0, 207, 202, 1, 0, 0, 0, 207, 203, 1, 0, 0, 0, 208, 15, 1, 0, 0, 0, 209, 210, 6, 8, -1, 0, 210, 214, 3, 18, 9, 0, 211, 212, 7, 0, 0, 0, 212, 214, 3, 16, 8, 3, 213, 209, 1, 0, 0, 0, 213, 211, 1, 0, 0, 0, 214, 223, 1, 0, 0, 0, 215, 216, 10, 2, 0, 0, 216, 217, 7, 1, 0, 0, 217, 222, 3, 16, 8, 3, 218, 219, 10, 1, 0, 0, 219, 220, 7, 0, 0, 0, 220, 222, 3, 16, 8, 2, 221, 215, 1, 0, 0, 0, 221, 218, 1, 0, 0, 0, 222, 225, 1, 0, 0, 0, 223, 221, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 17, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 226, 234, 3, 56, 28, 0, 227, 234, 3, 48, 24, 0, 228, 234, 3, 20, 10, 0, 229, 230, 5, 40, 0, 0, 230, 231, 3, 10, 5, 0, 231, 232, 5, 50, 0, 0, 232, 234, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 233, 227, 1, 0, 0, 0, 233, 228, 1, 0, 0, 0, 233, 229, 1, 0, 0, 0, 234, 19, 1, 0, 0, 0, 235, 236, 3, 52, 26, 0, 236, 246, 5, 40, 0, 
0, 237, 247, 5, 61, 0, 0, 238, 243, 3, 10, 5, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 10, 5, 0, 241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 237, 1, 0, 0, 0, 246, 238, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 5, 50, 0, 0, 249, 21, 1, 0, 0, 0, 250, 251, 5, 14, 0, 0, 251, 252, 3, 24, 12, 0, 252, 23, 1, 0, 0, 0, 253, 258, 3, 26, 13, 0, 254, 255, 5, 34, 0, 0, 255, 257, 3, 26, 13, 0, 256, 254, 1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 25, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 267, 3, 10, 5, 0, 262, 263, 3, 48, 24, 0, 263, 264, 5, 33, 0, 0, 264, 265, 3, 10, 5, 0, 265, 267, 1, 0, 0, 0, 266, 261, 1, 0, 0, 0, 266, 262, 1, 0, 0, 0, 267, 27, 1, 0, 0, 0, 268, 269, 5, 6, 0, 0, 269, 274, 3, 30, 15, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 30, 15, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 279, 3, 36, 18, 0, 278, 277, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 282, 3, 32, 16, 0, 281, 280, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 29, 1, 0, 0, 0, 283, 284, 7, 2, 0, 0, 284, 31, 1, 0, 0, 0, 285, 286, 5, 71, 0, 0, 286, 291, 3, 34, 17, 0, 287, 288, 5, 34, 0, 0, 288, 290, 3, 34, 17, 0, 289, 287, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 33, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 294, 295, 3, 90, 45, 0, 295, 296, 5, 33, 0, 0, 296, 297, 3, 90, 45, 0, 297, 35, 1, 0, 0, 0, 298, 301, 3, 38, 19, 0, 299, 301, 3, 40, 20, 0, 300, 298, 1, 0, 0, 0, 300, 299, 1, 0, 0, 0, 301, 37, 1, 0, 0, 0, 302, 303, 5, 72, 0, 0, 303, 308, 3, 30, 15, 0, 304, 305, 5, 34, 0, 0, 305, 307, 3, 30, 15, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 39, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 5, 64, 0, 0, 312, 313, 3, 38, 19, 0, 313, 314, 5, 65, 0, 0, 314, 41, 1, 0, 0, 0, 315, 316, 5, 4, 0, 0, 316, 317, 3, 24, 12, 0, 317, 43, 1, 0, 0, 0, 318, 320, 5, 17, 0, 0, 319, 321, 3, 24, 12, 0, 320, 319, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 324, 1, 0, 0, 0, 322, 323, 5, 30, 0, 0, 323, 325, 3, 24, 12, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 45, 1, 0, 0, 0, 326, 327, 5, 8, 0, 0, 327, 330, 3, 24, 12, 0, 328, 329, 5, 30, 0, 0, 329, 331, 3, 24, 12, 0, 330, 328, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 47, 1, 0, 0, 0, 332, 337, 3, 52, 26, 0, 333, 334, 5, 36, 0, 0, 334, 336, 3, 52, 26, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 49, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 345, 3, 54, 27, 0, 341, 342, 5, 36, 0, 0, 342, 344, 3, 54, 27, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 51, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 7, 3, 0, 0, 349, 53, 1, 0, 0, 0, 350, 351, 5, 77, 0, 0, 351, 55, 1, 0, 0, 0, 352, 395, 5, 45, 0, 0, 353, 354, 3, 88, 44, 0, 354, 355, 5, 66, 0, 0, 355, 395, 1, 0, 0, 0, 356, 395, 3, 86, 43, 0, 357, 395, 3, 88, 44, 0, 358, 395, 3, 82, 41, 0, 359, 395, 5, 48, 0, 0, 360, 395, 3, 90, 45, 0, 361, 362, 5, 64, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 65, 0, 0, 371, 395, 1, 0, 0, 0, 372, 373, 5, 64, 0, 0, 373, 378, 3, 82, 41, 0, 374, 375, 5, 34, 0, 0, 375, 377, 3, 82, 41, 0, 376, 374, 1, 0, 0, 0, 377, 380, 1, 
0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 381, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 381, 382, 5, 65, 0, 0, 382, 395, 1, 0, 0, 0, 383, 384, 5, 64, 0, 0, 384, 389, 3, 90, 45, 0, 385, 386, 5, 34, 0, 0, 386, 388, 3, 90, 45, 0, 387, 385, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 392, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 392, 393, 5, 65, 0, 0, 393, 395, 1, 0, 0, 0, 394, 352, 1, 0, 0, 0, 394, 353, 1, 0, 0, 0, 394, 356, 1, 0, 0, 0, 394, 357, 1, 0, 0, 0, 394, 358, 1, 0, 0, 0, 394, 359, 1, 0, 0, 0, 394, 360, 1, 0, 0, 0, 394, 361, 1, 0, 0, 0, 394, 372, 1, 0, 0, 0, 394, 383, 1, 0, 0, 0, 395, 57, 1, 0, 0, 0, 396, 397, 5, 10, 0, 0, 397, 398, 5, 28, 0, 0, 398, 59, 1, 0, 0, 0, 399, 400, 5, 16, 0, 0, 400, 405, 3, 62, 31, 0, 401, 402, 5, 34, 0, 0, 402, 404, 3, 62, 31, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 61, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 410, 3, 10, 5, 0, 409, 411, 7, 4, 0, 0, 410, 409, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 413, 5, 46, 0, 0, 413, 415, 7, 5, 0, 0, 414, 412, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 63, 1, 0, 0, 0, 416, 417, 5, 9, 0, 0, 417, 422, 3, 50, 25, 0, 418, 419, 5, 34, 0, 0, 419, 421, 3, 50, 25, 0, 420, 418, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 65, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 425, 426, 5, 2, 0, 0, 426, 431, 3, 50, 25, 0, 427, 428, 5, 34, 0, 0, 428, 430, 3, 50, 25, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 67, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 13, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 50, 25, 0, 444, 445, 5, 81, 0, 0, 445, 446, 3, 50, 25, 0, 446, 71, 1, 0, 0, 0, 447, 448, 5, 1, 0, 0, 448, 449, 3, 18, 9, 0, 449, 451, 3, 90, 45, 0, 450, 452, 3, 78, 39, 0, 451, 450, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 73, 1, 0, 0, 0, 453, 454, 5, 7, 0, 0, 454, 455, 3, 18, 9, 0, 455, 456, 3, 90, 45, 0, 456, 75, 1, 0, 0, 0, 457, 458, 5, 12, 0, 0, 458, 459, 3, 48, 24, 0, 459, 77, 1, 0, 0, 0, 460, 465, 3, 80, 40, 0, 461, 462, 5, 34, 0, 0, 462, 464, 3, 80, 40, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 469, 3, 52, 26, 0, 469, 470, 5, 33, 0, 0, 470, 471, 3, 56, 28, 0, 471, 81, 1, 0, 0, 0, 472, 473, 7, 6, 0, 0, 473, 83, 1, 0, 0, 0, 474, 477, 3, 86, 43, 0, 475, 477, 3, 88, 44, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 85, 1, 0, 0, 0, 478, 480, 7, 0, 0, 0, 479, 478, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 5, 29, 0, 0, 482, 87, 1, 0, 0, 0, 483, 485, 7, 0, 0, 0, 484, 483, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 5, 28, 0, 0, 487, 89, 1, 0, 0, 0, 488, 489, 5, 27, 0, 0, 489, 91, 1, 0, 0, 0, 490, 491, 7, 7, 0, 0, 491, 93, 1, 0, 0, 0, 492, 493, 5, 5, 0, 0, 493, 494, 3, 96, 48, 0, 494, 95, 1, 0, 0, 0, 495, 496, 5, 64, 0, 0, 496, 497, 3, 2, 1, 0, 497, 498, 5, 65, 0, 0, 498, 97, 1, 0, 0, 0, 499, 500, 5, 15, 0, 0, 500, 501, 5, 97, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 5, 11, 0, 0, 503, 504, 5, 101, 0, 0, 504, 101, 1, 0, 0, 0, 505, 506, 5, 3, 0, 0, 506, 509, 5, 87, 0, 0, 507, 508, 5, 85, 0, 0, 508, 510, 3, 50, 25, 0, 509, 507, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 520, 1, 0, 0, 0, 511, 512, 5, 86, 0, 0, 512, 517, 3, 104, 52, 0, 513, 
514, 5, 34, 0, 0, 514, 516, 3, 104, 52, 0, 515, 513, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 521, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 511, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 103, 1, 0, 0, 0, 522, 523, 3, 50, 25, 0, 523, 524, 5, 33, 0, 0, 524, 526, 1, 0, 0, 0, 525, 522, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 3, 50, 25, 0, 528, 105, 1, 0, 0, 0, 51, 117, 125, 140, 152, 161, 169, 173, 181, 183, 188, 195, 200, 207, 213, 221, 223, 233, 243, 246, 258, 266, 274, 278, 281, 291, 300, 308, 320, 324, 330, 337, 345, 367, 378, 389, 394, 405, 410, 414, 422, 431, 440, 451, 465, 476, 479, 484, 509, 517, 520, 525] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index bf8b31cab183f..96ce4569fdd8e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -1885,12 +1885,12 @@ public FromIdentifierContext fromIdentifier(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } - public FromOptionsContext fromOptions() { - return getRuleContext(FromOptionsContext.class,0); - } public MetadataContext metadata() { return getRuleContext(MetadataContext.class,0); } + public FromOptionsContext fromOptions() { + return getRuleContext(FromOptionsContext.class,0); + } @SuppressWarnings("this-escape") public FromCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -1946,7 +1946,7 @@ public final FromCommandContext fromCommand() throws RecognitionException { case 1: { setState(277); - fromOptions(); + metadata(); } break; } @@ -1956,7 +1956,7 @@ public final FromCommandContext fromCommand() throws RecognitionException { case 1: { setState(280); - metadata(); + fromOptions(); } break; } @@ -4959,8 +4959,8 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0000\u0000\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001"+ "\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u0116\u0001"+ "\u0000\u0000\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0117\u0003"+ - " \u0010\u0000\u0116\u0115\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ - "\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u011a\u0003$\u0012"+ + "$\u0012\u0000\u0116\u0115\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ + "\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u011a\u0003 \u0010"+ "\u0000\u0119\u0118\u0001\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000"+ "\u0000\u011a\u001d\u0001\u0000\u0000\u0000\u011b\u011c\u0007\u0002\u0000"+ "\u0000\u011c\u001f\u0001\u0000\u0000\u0000\u011d\u011e\u0005G\u0000\u0000"+ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 304f693adf89c..f7bb90208af3f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -705,7 +705,7 @@ public void testFromOptionsValues() { } public void testFromOptionsWithMetadata() { - var plan = statement(FROM + " OPTIONS \"preference\"=\"foo\" METADATA _id"); + 
var plan = statement(FROM + " METADATA _id OPTIONS \"preference\"=\"foo\""); var unresolved = as(plan, EsqlUnresolvedRelation.class); assertNotNull(unresolved.esSourceOptions()); assertThat(unresolved.esSourceOptions().preference(), is("foo")); From c74490c1375570b9341d916a016995f0bbb4d079 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Apr 2024 07:42:40 -0400 Subject: [PATCH 080/264] ESQL: Enable VALUES agg for datetime (#107016) When I wrote the `VALUES` agg I didn't plug it in for `datetime` fields. Ooops. We just have to plug it in. --- docs/changelog/107016.yaml | 5 ++ .../functions/parameters/st_contains.asciidoc | 2 + .../functions/parameters/st_within.asciidoc | 2 + docs/reference/esql/functions/values.asciidoc | 2 +- .../src/main/resources/date.csv-spec | 58 +++++++++++++++++++ .../expression/function/aggregate/Values.java | 2 +- 6 files changed, 69 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107016.yaml diff --git a/docs/changelog/107016.yaml b/docs/changelog/107016.yaml new file mode 100644 index 0000000000000..a2e32749a8008 --- /dev/null +++ b/docs/changelog/107016.yaml @@ -0,0 +1,5 @@ +pr: 107016 +summary: "ESQL: Enable VALUES agg for datetime" +area: Aggregations +type: bug +issues: [] diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc index dbc9adf478948..e87a0d0eb94f0 100644 --- a/docs/reference/esql/functions/parameters/st_contains.asciidoc +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `geomA`:: diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc index dbc9adf478948..e87a0d0eb94f0 100644 --- a/docs/reference/esql/functions/parameters/st_within.asciidoc +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `geomA`:: diff --git a/docs/reference/esql/functions/values.asciidoc b/docs/reference/esql/functions/values.asciidoc index 9707180058e24..f13338a572b36 100644 --- a/docs/reference/esql/functions/values.asciidoc +++ b/docs/reference/esql/functions/values.asciidoc @@ -12,7 +12,7 @@ VALUES(expression) ---- `expression`:: -Expression of any type except `geo_point`, `cartesian_point`, or `geo_shape`. +Expression of any type except `geo_point`, `cartesian_point`, `geo_shape`, or `cartesian_shape`. 
*Description* diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 96a025ce5dc9c..8f9ce9968d89d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1215,3 +1215,61 @@ count:long | age:long 8 | 70 8 | 64 ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10003 +| STATS birth_date=MV_SORT(VALUES(birth_date)) +; + + birth_date:datetime +["1953-09-02T00:00:00Z", "1959-12-03T00:00:00.000Z", "1964-06-02T00:00:00.000Z"] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS birth_date=MV_SORT(VALUES(birth_date)) BY first_letter +| SORT first_letter +; + + birth_date:datetime | first_letter:keyword + 1953-04-20T00:00:00Z | A + 1964-06-02T00:00:00Z | B + 1954-05-01T00:00:00Z | C + 1953-09-02T00:00:00Z | G + 1955-01-21T00:00:00Z | K + 1959-12-03T00:00:00Z | P +[1952-04-19T00:00:00Z, 1958-02-19T00:00:00Z] | S + 1957-05-23T00:00:00Z | T +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS birth_date=MV_SORT(VALUES(birth_date)) BY job_positions +| SORT job_positions +; + + birth_date:datetime | job_positions:keyword + 1953-09-02T00:00:00Z | Accountant + 1954-05-01T00:00:00Z | Head Human Resources + [1952-04-19T00:00:00Z, 1958-02-19T00:00:00Z] | Internship + 1958-02-19T00:00:00Z | Junior Developer + 1953-04-20T00:00:00Z | Principal Support Engineer + 1958-02-19T00:00:00Z | Purchase Manager + 1954-05-01T00:00:00 | Reporting Analyst +[1952-04-19T00:00:00Z, 1953-09-02T00:00:00Z, 1958-02-19T00:00:00Z] | Senior Python Developer + [1953-04-20T00:00:00Z, 1964-06-02T00:00:00Z] | Senior Team Lead + 1954-05-01T00:00:00Z | Support Engineer + [1953-04-20T00:00:00Z, 1954-05-01T00:00:00Z] | Tech Lead +[1955-01-21T00:00:00Z, 1957-05-23T00:00:00Z, 1959-12-03T00:00:00Z] | null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index d0d614a665794..aada71bba97d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -68,7 +68,7 @@ public AggregatorFunctionSupplier supplier(List inputChannels) { if (type == DataTypes.INTEGER) { return new ValuesIntAggregatorFunctionSupplier(inputChannels); } - if (type == DataTypes.LONG) { + if (type == DataTypes.LONG || type == DataTypes.DATETIME) { return new ValuesLongAggregatorFunctionSupplier(inputChannels); } if (type == DataTypes.DOUBLE) { From 1c923a66bd173f0dfce7b70820405f652fb186cf Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 3 Apr 2024 07:46:12 -0400 Subject: [PATCH 081/264] [ML] Convert DatafeedContext to a Record (#107009) Refactor DatafeedContext from a POJO with a Builder to a Record. This is mostly to reduce code footprint and improve readability in the `DatafeedContextProvider` class. The lambda closures are now responsible for passing the fields to the DatafeedContext, rather than the Builder. 
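To make the shape of this refactor concrete, here is a minimal, self-contained sketch (the `Sample*` names are hypothetical, not from this PR) of replacing a builder-assembled POJO with a record whose compact constructor keeps the same null checks:

```java
import java.util.Objects;

// Before: a mutable POJO that callers filled in field by field through a Builder.
// After: a record; the compact constructor below validates every component once,
// at the single point where all fields are passed together.
record SampleContext(String datafeedConfig, String job) {
    SampleContext {
        Objects.requireNonNull(datafeedConfig);
        Objects.requireNonNull(job);
    }
}

class SampleContextDemo {
    public static void main(String[] args) {
        SampleContext context = new SampleContext("datafeed-1", "job-1");
        // Records generate accessors named after the components, e.g. job()
        // rather than getJob(), which is why the call sites change in this PR.
        System.out.println(context.datafeedConfig() + " / " + context.job());
    }
}
```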
Close #107008 --- .../xpack/ml/datafeed/DatafeedContext.java | 72 ++----------------- .../ml/datafeed/DatafeedContextProvider.java | 10 +-- .../xpack/ml/datafeed/DatafeedJobBuilder.java | 16 ++--- .../ml/datafeed/DatafeedJobBuilderTests.java | 48 ++++++------- .../ml/datafeed/DatafeedRunnerTests.java | 13 ++-- 5 files changed, 44 insertions(+), 115 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContext.java index 133ea66485c2e..82a3ceae07b09 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContext.java @@ -14,72 +14,12 @@ import java.util.Objects; -public class DatafeedContext { +public record DatafeedContext(DatafeedConfig datafeedConfig, Job job, RestartTimeInfo restartTimeInfo, DatafeedTimingStats timingStats) { - private final DatafeedConfig datafeedConfig; - private final Job job; - private final RestartTimeInfo restartTimeInfo; - private final DatafeedTimingStats timingStats; - - private DatafeedContext(DatafeedConfig datafeedConfig, Job job, RestartTimeInfo restartTimeInfo, DatafeedTimingStats timingStats) { - this.datafeedConfig = Objects.requireNonNull(datafeedConfig); - this.job = Objects.requireNonNull(job); - this.restartTimeInfo = Objects.requireNonNull(restartTimeInfo); - this.timingStats = Objects.requireNonNull(timingStats); - } - - public DatafeedConfig getDatafeedConfig() { - return datafeedConfig; - } - - public Job getJob() { - return job; - } - - public RestartTimeInfo getRestartTimeInfo() { - return restartTimeInfo; - } - - public DatafeedTimingStats getTimingStats() { - return timingStats; - } - - static Builder builder() { - return new Builder(); - } - - static class Builder { - private volatile DatafeedConfig datafeedConfig; - private volatile Job job; - private volatile RestartTimeInfo restartTimeInfo; - private volatile DatafeedTimingStats timingStats; - - Builder setDatafeedConfig(DatafeedConfig datafeedConfig) { - this.datafeedConfig = datafeedConfig; - return this; - } - - Builder setJob(Job job) { - this.job = job; - return this; - } - - Job getJob() { - return job; - } - - Builder setRestartTimeInfo(RestartTimeInfo restartTimeInfo) { - this.restartTimeInfo = restartTimeInfo; - return this; - } - - Builder setTimingStats(DatafeedTimingStats timingStats) { - this.timingStats = timingStats; - return this; - } - - DatafeedContext build() { - return new DatafeedContext(datafeedConfig, job, restartTimeInfo, timingStats); - } + public DatafeedContext { + Objects.requireNonNull(datafeedConfig); + Objects.requireNonNull(job); + Objects.requireNonNull(restartTimeInfo); + Objects.requireNonNull(timingStats); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java index 1e4db8aff4559..ba0420c6cf94a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java @@ -32,18 +32,12 @@ public DatafeedContextProvider( } public void buildDatafeedContext(String datafeedId, ActionListener listener) { - DatafeedContext.Builder context = DatafeedContext.builder(); - 
datafeedConfigProvider.getDatafeedConfig(datafeedId, null, listener.delegateFailureAndWrap((delegate1, datafeedConfigBuilder) -> { DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); - context.setDatafeedConfig(datafeedConfig); jobConfigProvider.getJob(datafeedConfig.getJobId(), null, delegate1.delegateFailureAndWrap((delegate2, jobBuilder) -> { - context.setJob(jobBuilder.build()); resultsProvider.getRestartTimeInfo(jobBuilder.getId(), delegate2.delegateFailureAndWrap((delegate3, restartTimeInfo) -> { - context.setRestartTimeInfo(restartTimeInfo); - resultsProvider.datafeedTimingStats(context.getJob().getId(), timingStats -> { - context.setTimingStats(timingStats); - delegate3.onResponse(context.build()); + resultsProvider.datafeedTimingStats(jobBuilder.getId(), timingStats -> { + delegate3.onResponse(new DatafeedContext(datafeedConfig, jobBuilder.build(), restartTimeInfo, timingStats)); }, delegate3::onFailure); })); })); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java index ec4a85814390d..f0b909922df73 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java @@ -76,16 +76,16 @@ private void setDelayedDataCheckFreq(TimeValue value) { void build(TransportStartDatafeedAction.DatafeedTask task, DatafeedContext context, ActionListener listener) { final ParentTaskAssigningClient parentTaskAssigningClient = new ParentTaskAssigningClient(client, clusterService.localNode(), task); - final DatafeedConfig datafeedConfig = context.getDatafeedConfig(); - final Job job = context.getJob(); - final long latestFinalBucketEndMs = context.getRestartTimeInfo().getLatestFinalBucketTimeMs() == null + final DatafeedConfig datafeedConfig = context.datafeedConfig(); + final Job job = context.job(); + final long latestFinalBucketEndMs = context.restartTimeInfo().getLatestFinalBucketTimeMs() == null ? -1 - : context.getRestartTimeInfo().getLatestFinalBucketTimeMs() + job.getAnalysisConfig().getBucketSpan().millis() - 1; - final long latestRecordTimeMs = context.getRestartTimeInfo().getLatestRecordTimeMs() == null + : context.restartTimeInfo().getLatestFinalBucketTimeMs() + job.getAnalysisConfig().getBucketSpan().millis() - 1; + final long latestRecordTimeMs = context.restartTimeInfo().getLatestRecordTimeMs() == null ? 
-1 - : context.getRestartTimeInfo().getLatestRecordTimeMs(); + : context.restartTimeInfo().getLatestRecordTimeMs(); final DatafeedTimingStatsReporter timingStatsReporter = new DatafeedTimingStatsReporter( - context.getTimingStats(), + context.timingStats(), jobResultsPersister::persistDatafeedTimingStats ); @@ -130,7 +130,7 @@ void build(TransportStartDatafeedAction.DatafeedTask task, DatafeedContext conte datafeedConfig.getMaxEmptySearches(), latestFinalBucketEndMs, latestRecordTimeMs, - context.getRestartTimeInfo().haveSeenDataPreviously(), + context.restartTimeInfo().haveSeenDataPreviously(), delayedDataCheckFreq ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index 0479837b5b827..be768e1a01e20 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -129,12 +129,12 @@ public void testBuild_GivenScrollDatafeedAndNewJob() throws Exception { wasHandlerCalled.compareAndSet(false, true); }); - DatafeedContext datafeedContext = DatafeedContext.builder() - .setDatafeedConfig(datafeed.build()) - .setJob(jobBuilder.build()) - .setRestartTimeInfo(new RestartTimeInfo(null, null, false)) - .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) - .build(); + DatafeedContext datafeedContext = new DatafeedContext( + datafeed.build(), + jobBuilder.build(), + new RestartTimeInfo(null, null, false), + new DatafeedTimingStats(jobBuilder.getId()) + ); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); @@ -159,12 +159,12 @@ public void testBuild_GivenScrollDatafeedAndOldJobWithLatestRecordTimestampAfter wasHandlerCalled.compareAndSet(false, true); }); - DatafeedContext datafeedContext = DatafeedContext.builder() - .setDatafeedConfig(datafeed.build()) - .setJob(jobBuilder.build()) - .setRestartTimeInfo(new RestartTimeInfo(3_600_000L, 7_200_000L, false)) - .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) - .build(); + DatafeedContext datafeedContext = new DatafeedContext( + datafeed.build(), + jobBuilder.build(), + new RestartTimeInfo(3_800_000L, 7_200_000L, false), + new DatafeedTimingStats(jobBuilder.getId()) + ); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); @@ -189,12 +189,12 @@ public void testBuild_GivenScrollDatafeedAndOldJobWithLatestBucketAfterLatestRec wasHandlerCalled.compareAndSet(false, true); }); - DatafeedContext datafeedContext = DatafeedContext.builder() - .setDatafeedConfig(datafeed.build()) - .setJob(jobBuilder.build()) - .setRestartTimeInfo(new RestartTimeInfo(3_800_000L, 3_600_000L, false)) - .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) - .build(); + DatafeedContext datafeedContext = new DatafeedContext( + datafeed.build(), + jobBuilder.build(), + new RestartTimeInfo(3_800_000L, 3_600_000L, false), + new DatafeedTimingStats(jobBuilder.getId()) + ); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); @@ -241,12 +241,12 @@ public void testBuildGivenRemoteIndicesButNoRemoteSearching() throws Exception { } ); - DatafeedContext datafeedContext = DatafeedContext.builder() - .setDatafeedConfig(datafeed.build()) - .setJob(jobBuilder.build()) - .setRestartTimeInfo(new RestartTimeInfo(null, null, false)) - .setTimingStats(new 
DatafeedTimingStats(jobBuilder.getId())) - .build(); + DatafeedContext datafeedContext = new DatafeedContext( + datafeed.build(), + jobBuilder.build(), + new RestartTimeInfo(null, null, false), + new DatafeedTimingStats(jobBuilder.getId()) + ); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedRunnerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedRunnerTests.java index 284ad48a9b3a4..cba97835ee2f4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedRunnerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedRunnerTests.java @@ -522,15 +522,10 @@ private DatafeedTask spyDatafeedTask(DatafeedTask task) { private void givenDatafeedHasNeverRunBefore(Job job, DatafeedConfig datafeed) { doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener datafeedContextListener = (ActionListener) invocationOnMock.getArguments()[1]; - DatafeedContext datafeedContext = DatafeedContext.builder() - .setJob(job) - .setDatafeedConfig(datafeed) - .setRestartTimeInfo(new RestartTimeInfo(null, null, false)) - .setTimingStats(new DatafeedTimingStats(job.getId())) - .build(); - datafeedContextListener.onResponse(datafeedContext); + ActionListener datafeedContextListener = invocationOnMock.getArgument(1); + datafeedContextListener.onResponse( + new DatafeedContext(datafeed, job, new RestartTimeInfo(null, null, false), new DatafeedTimingStats(job.getId())) + ); return null; }).when(datafeedContextProvider).buildDatafeedContext(eq(DATAFEED_ID), any()); } From bb520eb9d452eca2c97a917fe3172724eb601a87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 3 Apr 2024 15:07:48 +0200 Subject: [PATCH 082/264] [Transform] Implement robustness test that constantly creates/starts/stops/deletes a continuous transform (#106313) --- .../transform/integration/TransformIT.java | 147 +++++++++++++++++- .../integration/TransformRestTestCase.java | 17 +- 2 files changed, 154 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 073f604e608da..538479c33b084 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -22,11 +22,14 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; +import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; +import org.elasticsearch.xpack.core.transform.transforms.SyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeRetentionPolicyConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; +import 
org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource; import org.junit.After; @@ -34,7 +37,9 @@ import java.io.IOException; import java.time.Instant; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -42,9 +47,12 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.oneOf; @@ -225,6 +233,133 @@ public void testBasicContinuousTransformStats() throws Exception { deleteTransform(transformId); } + public void testTransformLifecycleInALoop() throws Exception { + String transformId = "lifecycle-in-a-loop"; + String indexName = transformId + "-src"; + createReviewsIndex(indexName, 100, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow); + + String destIndex = transformId + "-dest"; + String config = createConfig(transformId, indexName, destIndex); + for (int i = 0; i < 100; ++i) { + long sleepAfterStartMillis = randomLongBetween(0, 5_000); + boolean force = randomBoolean(); + try { + // Create the continuous transform + putTransform(transformId, config, RequestOptions.DEFAULT); + assertThat(getTransformTasks(), is(empty())); + assertThatTransformTaskDoesNotExist(transformId); + + startTransform(transformId, RequestOptions.DEFAULT); + // There is 1 transform task after start + assertThat(getTransformTasks(), hasSize(1)); + assertThatTransformTaskExists(transformId); + + Thread.sleep(sleepAfterStartMillis); + // There should still be 1 transform task as the transform is continuous + assertThat(getTransformTasks(), hasSize(1)); + assertThatTransformTaskExists(transformId); + + // Stop the transform with force set randomly + stopTransform(transformId, true, null, false, force); + // After the transform is stopped, there should be no transform task left + assertThat(getTransformTasks(), is(empty())); + assertThatTransformTaskDoesNotExist(transformId); + + // Delete the transform + deleteTransform(transformId); + } catch (AssertionError | Exception e) { + throw new AssertionError( + format( + "Failure at iteration %d (sleepAfterStartMillis=%s,force=%s): %s", + i, + sleepAfterStartMillis, + force, + e.getMessage() + ), + e + ); + } + } + } + + private String createConfig(String transformId, String sourceIndex, String destIndex) throws Exception { + Map groups = new HashMap<>(); + groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null)); + groups.put("by-user", new TermsGroupSource("user_id", null, false)); + groups.put("by-business", new TermsGroupSource("business_id", null, false)); + + AggregatorFactories.Builder aggs = AggregatorFactories.builder() + .addAggregator(AggregationBuilders.avg("review_score").field("stars")) + .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); + 
+        PivotConfig pivotConfig = createPivotConfig(groups, aggs);
+
+        SyncConfig syncConfig = new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1));
+
+        TransformConfig config = createTransformConfigBuilder(transformId, destIndex, QueryConfig.matchAll(), sourceIndex).setFrequency(
+            TimeValue.timeValueSeconds(1)
+        ).setSyncConfig(syncConfig).setPivotConfig(pivotConfig).build();
+
+        return Strings.toString(config);
+    }
+
+    /**
+     * Returns the list of transform tasks as reported by the _tasks API.
+     */
+    @SuppressWarnings("unchecked")
+    protected List<String> getTransformTasks() throws IOException {
+        final Request tasksRequest = new Request("GET", "/_tasks");
+        tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*");
+        final Map<String, Object> tasksResponse = entityAsMap(client().performRequest(tasksRequest));
+
+        Map<String, Object> nodes = (Map<String, Object>) tasksResponse.get("nodes");
+        if (nodes == null) {
+            return List.of();
+        }
+
+        List<String> foundTasks = new ArrayList<>();
+        for (Map.Entry<String, Object> node : nodes.entrySet()) {
+            Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue();
+            Map<String, Object> tasks = (Map<String, Object>) nodeInfo.get("tasks");
+            if (tasks != null) {
+                foundTasks.addAll(tasks.keySet());
+            }
+        }
+        return foundTasks;
+    }
+
+    /**
+     * Verifies that the given transform task exists in cluster state.
+     */
+    private void assertThatTransformTaskExists(String transformId) throws IOException {
+        assertThatTransformTaskCountIsEqualTo(transformId, 1);
+    }
+
+    /**
+     * Verifies that the given transform task does not exist in cluster state.
+     */
+    private void assertThatTransformTaskDoesNotExist(String transformId) throws IOException {
+        assertThatTransformTaskCountIsEqualTo(transformId, 0);
+    }
+
+    /**
+     * Verifies that the number of transform tasks in cluster state for the given transform is as expected.
+     */
+    @SuppressWarnings("unchecked")
+    private void assertThatTransformTaskCountIsEqualTo(String transformId, int expectedCount) throws IOException {
+        Request request = new Request("GET", "_cluster/state");
+        Map<String, Object> response = entityAsMap(adminClient().performRequest(request));
+
+        List<Map<String, Object>> tasks = (List<Map<String, Object>>) XContentMapValues.extractValue(
+            response,
+            "metadata",
+            "persistent_tasks",
+            "tasks"
+        );
+
+        assertThat("Tasks were: " + tasks, tasks.stream().filter(t -> transformId.equals(t.get("id"))).toList(), hasSize(expectedCount));
+    }
+
     public void testContinuousTransformUpdate() throws Exception {
         String indexName = "continuous-reviews-update";
         createReviewsIndex(indexName, 10, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow);
@@ -271,7 +406,7 @@ public void testContinuousTransformUpdate() throws Exception {
         putPipeline.setEntity(new StringEntity(Strings.toString(pipelineBuilder), ContentType.APPLICATION_JSON));
         assertOK(client().performRequest(putPipeline));
 
-        String update = Strings.format("""
+        String update = format("""
             {
               "description": "updated config",
               "dest": {
@@ -382,7 +517,7 @@ public void testStopWaitForCheckpoint() throws Exception {
         });
 
         // waitForCheckpoint: true should make the transform continue until we hit the first checkpoint, then it will stop
-        stopTransform(transformId, false, null, true);
+        stopTransform(transformId, false, null, true, false);
 
         // Wait until the first checkpoint
         waitUntilCheckpoint(config.getId(), 1L);
@@ -416,7 +551,7 @@ public void testStopWaitForCheckpoint() throws Exception {
         });
 
         var waitForCompletion = randomBoolean();
-        stopTransform(transformId, waitForCompletion, null, true);
+        stopTransform(transformId, waitForCompletion, null, true, false);
 
         assertBusy(() -> {
             var stateAndStats =
getBasicTransformStats(transformId); assertThat(stateAndStats.get("state"), equalTo("stopped")); @@ -467,7 +602,7 @@ public void testContinuousTransformRethrottle() throws Exception { // test randomly: with explicit settings and reset to default String reqsPerSec = randomBoolean() ? "1000" : "null"; String maxPageSize = randomBoolean() ? "1000" : "null"; - String update = Strings.format(""" + String update = format(""" { "settings" : { "docs_per_second": %s, @@ -556,14 +691,14 @@ public void testStartTransform_GivenTimeout_Returns408() throws Exception { private void indexMoreDocs(long timestamp, long userId, String index) throws Exception { StringBuilder bulkBuilder = new StringBuilder(); for (int i = 0; i < 25; i++) { - bulkBuilder.append(Strings.format(""" + bulkBuilder.append(format(""" {"create":{"_index":"%s"}} """, index)); int stars = (i + 20) % 5; long business = (i + 100) % 50; - String source = Strings.format(""" + String source = format(""" {"user_id":"user_%s","count":%s,"business_id":"business_%s","stars":%s,"timestamp":%s} """, userId, i, business, stars, timestamp); bulkBuilder.append(source); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 897de6c120a8b..6e13e936f5532 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -153,11 +153,16 @@ protected Map getIndexMapping(String index, RequestOptions optio } protected void stopTransform(String id) throws IOException { - stopTransform(id, true, null, false); + stopTransform(id, true, null, false, false); } - protected void stopTransform(String id, boolean waitForCompletion, @Nullable TimeValue timeout, boolean waitForCheckpoint) - throws IOException { + protected void stopTransform( + String id, + boolean waitForCompletion, + @Nullable TimeValue timeout, + boolean waitForCheckpoint, + boolean force + ) throws IOException { final Request stopTransformRequest = new Request("POST", TRANSFORM_ENDPOINT + id + "/_stop"); stopTransformRequest.addParameter(TransformField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(waitForCompletion)); @@ -165,6 +170,9 @@ protected void stopTransform(String id, boolean waitForCompletion, @Nullable Tim if (timeout != null) { stopTransformRequest.addParameter(TransformField.TIMEOUT.getPreferredName(), timeout.getStringRep()); } + if (force) { + stopTransformRequest.addParameter(TransformField.FORCE.getPreferredName(), "true"); + } assertAcknowledged(client().performRequest(stopTransformRequest)); } @@ -215,9 +223,10 @@ protected void deleteTransform(String id) throws IOException { protected void deleteTransform(String id, boolean force) throws IOException { Request request = new Request("DELETE", TRANSFORM_ENDPOINT + id); if (force) { - request.addParameter("force", "true"); + request.addParameter(TransformField.FORCE.getPreferredName(), "true"); } assertOK(adminClient().performRequest(request)); + createdTransformIds.remove(id); } protected Response putTransform(String id, String config, RequestOptions options) throws IOException { From febc23c2e8c78bb57a0a56aefa5844c9e86f86d9 Mon Sep 17 00:00:00 2001 
From: Pat Whelan Date: Wed, 3 Apr 2024 09:15:19 -0400 Subject: [PATCH 083/264] [Transform] Clarify afterFinishOrFailure javadoc (#107048) Close #100515 --- .../elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index e811c38740618..48f73fc352dd1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -401,6 +401,8 @@ protected float getMaxDocsPerSecond() { /** * Called after onFinish or after onFailure and all the following steps - in particular state persistence - are completed. + * This will be called before the internal state changes from {@link IndexerState#INDEXING} to {@link IndexerState#STARTED} or + * from {@link IndexerState#STOPPING} to {@link IndexerState#STOPPED}. */ protected void afterFinishOrFailure() {} From a3a60b592cdbbee680a3fe5a4c2bcbebc284c8cf Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 3 Apr 2024 16:21:41 +0300 Subject: [PATCH 084/264] [TEST] Add rolling upgrade test for downsampling (#107036) Simple test, sets up downsampling to run in the old cluster, then waits for it to complete and verifies that downsampled indexes can get queried in the mixed and upgraded clusters. --- .../elasticsearch/upgrades/DownsampleIT.java | 261 ++++++++++++++++++ .../test/rest/RestTestLegacyFeatures.java | 3 + 2 files changed, 264 insertions(+) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java new file mode 100644 index 0000000000000..757f793ac4c46 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -0,0 +1,261 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; + +public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { + + private static final String FIXED_INTERVAL = "1h"; + private String index; + private String policy; + private String dataStream; + + public DownsampleIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + private static final String POLICY = """ + { + "policy": { + "phases": { + "hot": { + "actions": { + "rollover" : { + "max_age": "30s" + }, + "downsample": { + "fixed_interval": "$interval" + } + } + } + } + } + } + """; + + private static final String TEMPLATE = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "time_series": { + "start_time": "2010-01-01T00:00:00.000Z", + "end_time": "2022-01-01T00:00:00.000Z" + }, + "routing_path": ["metricset"], + "mode": "time_series", + "look_ahead_time": "1m", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String TEMPLATE_NO_TIME_BOUNDARIES = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "routing_path": ["metricset"], + "mode": "time_series", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String BULK = """ + {"create": {}} + {"@timestamp": "2020-01-01T05:10:00Z", "metricset": "pod", "volume" : 10} + {"create": {}} + {"@timestamp": "2020-01-01T05:20:00Z", "metricset": "pod", "volume" : 20} + {"create": {}} + {"@timestamp": "2020-01-01T05:30:00Z", "metricset": "pod", "volume" : 30} + {"create": {}} + {"@timestamp": "2020-01-01T05:40:00Z", "metricset": "pod", "volume" : 40} + {"create": {}} + {"@timestamp": "2020-01-01T06:10:00Z", "metricset": "pod", "volume" : 50} + {"create": {}} + {"@timestamp": "2020-01-01T07:10:00Z", "metricset": "pod", "volume" : 60} + {"create": {}} + {"@timestamp": "2020-01-01T09:10:00Z", "metricset": "pod", "volume" : 70} + {"create": {}} + {"@timestamp": "2020-01-01T09:20:00Z", "metricset": "pod", "volume" : 80} + """; + + @Before + public void refreshAbstractions() { + policy = "policy-" + randomAlphaOfLength(5); + dataStream = "ds-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + index = ".ds-" + dataStream; + logger.info("--> running [{}] with index [{}], data stream [{}], and policy [{}]", getTestName(), index, dataStream, policy); + } + + @Before + public void updatePollInterval() throws IOException { + updateClusterSettings(client(), 
Settings.builder().put("indices.lifecycle.poll_interval", "5s").build()); + } + + private void createIndex() throws IOException { + var putIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE, dataStream, policy)); + assertOK(client().performRequest(putIndexTemplateRequest)); + } + + private void bulk() throws IOException { + var bulkRequest = new Request("POST", "/" + dataStream + "/_bulk"); + bulkRequest.setJsonEntity(BULK); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + + private void createIlmPolicy() throws IOException { + Request request = new Request("PUT", "_ilm/policy/" + policy); + request.setJsonEntity(POLICY.replace("$interval", FIXED_INTERVAL)); + client().performRequest(request); + } + + private void startDownsampling() throws Exception { + // Update template to not contain time boundaries anymore (rollover is blocked otherwise due to index time + // boundaries overlapping after rollover) + Request updateIndexTemplateRequest = new Request("POST", "/_index_template/1"); + updateIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE_NO_TIME_BOUNDARIES, dataStream, policy)); + assertOK(client().performRequest(updateIndexTemplateRequest)); + + // Manual rollover the original index such that it's not the write index in the data stream anymore + Request rolloverRequest = new Request("POST", "/" + dataStream + "/_rollover"); + rolloverRequest.setJsonEntity(""" + { + "conditions": { + "max_docs": "1" + } + }"""); + client().performRequest(rolloverRequest); + logger.info("rollover complete"); + } + + private void runQuery() throws Exception { + String rollup = waitAndGetRollupIndexName(); + assertFalse(rollup.isEmpty()); + + // Retry until the downsample index is populated. 
+        assertBusy(() -> {
+            Request request = new Request("POST", "/" + dataStream + "/_search");
+            var map = entityAsMap(client().performRequest(request));
+            var hits = (List<?>) ((Map<?, ?>) map.get("hits")).get("hits");
+            assertEquals(4, hits.size());
+            for (var hit : hits) {
+                assertEquals(rollup, ((Map<?, ?>) hit).get("_index"));
+            }
+        }, 30, TimeUnit.SECONDS);
+    }
+
+    private String waitAndGetRollupIndexName() throws InterruptedException, IOException {
+        final String[] rollupIndexName = new String[1];
+        waitUntil(() -> {
+            try {
+                rollupIndexName[0] = getRollupIndexName();
+                return rollupIndexName[0] != null;
+            } catch (IOException e) {
+                return false;
+            }
+        }, 120, TimeUnit.SECONDS);
+        if (rollupIndexName[0] == null) {
+            logger.warn("--> rollup index name is NULL");
+        } else {
+            logger.info("--> original index name is [{}], rollup index name is [{}]", index, rollupIndexName[0]);
+        }
+        return rollupIndexName[0];
+    }
+
+    private String getRollupIndexName() throws IOException {
+        String endpoint = "/downsample-" + FIXED_INTERVAL + "-" + index + "-*/?expand_wildcards=all";
+        Response response = client().performRequest(new Request("GET", endpoint));
+        Map<String, Object> asMap = responseAsMap(response);
+        if (asMap.size() == 1) {
+            return (String) asMap.keySet().toArray()[0];
+        }
+        logger.warn("--> No matching rollup name for path [{}]", endpoint);
+        return null;
+    }
+
+    public void testRollupIndex() throws Exception {
+        assumeTrue(
+            "Downsample got many stability improvements in 8.10.0",
+            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE)
+        );
+        if (isOldCluster()) {
+            createIlmPolicy();
+            createIndex();
+            bulk();
+            startDownsampling();
+        } else if (isMixedCluster()) {
+            runQuery();
+        } else if (isUpgradedCluster()) {
+            runQuery();
+        }
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
index 88232bd7fd16c..198226536af42 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
@@ -79,6 +79,8 @@ public class RestTestLegacyFeatures implements FeatureSpecification {
     public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format");
     public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported");
 
+    public static final NodeFeature TSDB_DOWNSAMPLING_STABLE = new NodeFeature("indices.tsdb_downsampling_stable");
+
     /*
      * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template.
      * Fixed in #98840
@@ -157,6 +159,7 @@ public Map<NodeFeature, Version> getHistoricalFeatures() {
             entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0),
             entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1),
             entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0),
+            entry(TSDB_DOWNSAMPLING_STABLE, Version.V_8_10_0),
             entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0),
             entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0),
             entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9),

From 8dff32166f2bdb5b0b190cc5e9c3cd5b83aadee6 Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Wed, 3 Apr 2024 15:46:03 +0200
Subject: [PATCH 085/264] Remove allowedFields argument from
 SearchExecutionContext constructor (#107021)

allowedFields is a mutable instance member that only ever gets set via its
setter. It does not need to be provided via constructor.
Removing a needless argument seems important given the number of other
arguments already present.
---
 .../src/main/java/org/elasticsearch/index/IndexService.java  | 1 -
 .../elasticsearch/index/query/CoordinatorRewriteContext.java | 1 -
 .../org/elasticsearch/index/query/QueryRewriteContext.java   | 3 ---
 .../org/elasticsearch/index/query/SearchExecutionContext.java | 4 ----
 .../java/org/elasticsearch/test/AbstractBuilderTestCase.java | 1 -
 5 files changed, 10 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java
index 16a5d153a3c19..a3980599e0e1a 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexService.java
@@ -715,7 +715,6 @@ public QueryRewriteContext newQueryRewriteContext(
             mapperService,
             mappingLookup,
             parseRuntimeMappings(runtimeMappings, mapperService, indexSettings, mappingLookup),
-            null,
             indexSettings,
             new Index(
                 RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()),
diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java
index 2a1062f8876d2..91305af55dcf5 100644
--- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java
+++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java
@@ -50,7 +50,6 @@ public CoordinatorRewriteContext(
             null,
             null,
             null,
-            null,
             null
         );
         this.indexLongFieldRange = indexLongFieldRange;
diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
index fd8d3794cf2d8..f314d4d08de5f 100644
--- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
+++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java
@@ -68,7 +68,6 @@ public QueryRewriteContext(
         final MapperService mapperService,
         final MappingLookup mappingLookup,
         final Map<String, MappedFieldType> runtimeMappings,
-        final Predicate<String> allowedFields,
         final IndexSettings indexSettings,
         final Index fullyQualifiedIndex,
         final Predicate<String> indexNameMatcher,
@@ -85,7 +84,6 @@
         this.mappingLookup = Objects.requireNonNull(mappingLookup);
         this.allowUnmappedFields = indexSettings == null || indexSettings.isDefaultAllowUnmappedFields();
         this.runtimeMappings = runtimeMappings;
-        this.allowedFields = allowedFields;
         this.indexSettings = indexSettings;
         this.fullyQualifiedIndex = fullyQualifiedIndex;
         this.indexNameMatcher = indexNameMatcher;
@@ -109,7 +107,6 @@ public QueryRewriteContext(final XContentParserConfiguration parserConfiguration
             null,
             null,
             null,
-            null,
             null
         );
     }
diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
index 638a04fb2e47d..cdd31f40dcfc6 100644
--- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
+++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
@@ -196,7 +196,6 @@ public SearchExecutionContext(
             allowExpensiveQueries,
             valuesSourceRegistry,
             parseRuntimeMappings(runtimeMappings, mapperService, indexSettings, mappingLookup),
-            null,
             requestSize
         );
     }
@@ -222,7 +221,6 @@ public SearchExecutionContext(SearchExecutionContext source) {
             source.allowExpensiveQueries,
             source.getValuesSourceRegistry(),
             source.runtimeMappings,
-            source.allowedFields,
             source.requestSize
         );
     }
@@ -247,7 +245,6 @@ private SearchExecutionContext(
         BooleanSupplier allowExpensiveQueries,
         ValuesSourceRegistry valuesSourceRegistry,
         Map<String, MappedFieldType> runtimeMappings,
-        Predicate<String> allowedFields,
         Integer requestSize
     ) {
         super(
@@ -257,7 +254,6 @@ private SearchExecutionContext(
             mapperService,
             mappingLookup,
             runtimeMappings,
-            allowedFields,
             indexSettings,
             fullyQualifiedIndex,
             indexNameMatcher,
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
index 76b836ba7e2a7..a5c0d4dbc6544 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
@@ -591,7 +591,6 @@ QueryRewriteContext createQueryRewriteContext() {
             mapperService,
             mapperService.mappingLookup(),
             emptyMap(),
-            null,
             idxSettings,
             new Index(
                 RemoteClusterAware.buildRemoteIndexName(null, idxSettings.getIndex().getName()),

From b97e2d61fba1409abfdeb8f2b992d731c10ad773 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 3 Apr 2024 09:46:32 -0400
Subject: [PATCH 086/264] ESQL: Fixup docs for LOG and LOG10 (#106963)

This merges all of the hand-written docs for `LOG` and `LOG10` into the
annotations which updates the `META FUNCTIONS` - now it'll always be the
same as the docs.

This also deletes the hand-maintained docs and lets the documentation
generation process rebuild it.
---
 .../esql/functions/description/log.asciidoc   |  2 +-
 .../esql/functions/description/log10.asciidoc |  2 +-
 .../esql/functions/examples/log.asciidoc      | 21 +++++++++
 .../esql/functions/examples/log10.asciidoc    | 13 ++++++
 .../esql/functions/layout/log.asciidoc        |  1 +
 .../esql/functions/layout/log10.asciidoc      |  1 +
 docs/reference/esql/functions/log.asciidoc    | 46 -------------------
 docs/reference/esql/functions/log10.asciidoc  | 31 -------------
 .../esql/functions/math-functions.asciidoc    |  4 +-
 .../esql/functions/parameters/log.asciidoc    |  4 +-
 .../esql/functions/parameters/log10.asciidoc  |  2 +-
 .../src/main/resources/meta.csv-spec          |  8 ++--
 .../expression/function/scalar/math/Log.java  | 24 ++++++++--
 .../function/scalar/math/Log10.java           | 19 +++++++-
 14 files changed, 85 insertions(+), 93 deletions(-)
 create mode 100644 docs/reference/esql/functions/examples/log.asciidoc
 create mode 100644 docs/reference/esql/functions/examples/log10.asciidoc
 delete mode 100644 docs/reference/esql/functions/log.asciidoc
 delete mode 100644 docs/reference/esql/functions/log10.asciidoc

diff --git a/docs/reference/esql/functions/description/log.asciidoc b/docs/reference/esql/functions/description/log.asciidoc
index 9e88e2fa90621..a761cc551e6fa 100644
--- a/docs/reference/esql/functions/description/log.asciidoc
+++ b/docs/reference/esql/functions/description/log.asciidoc
@@ -2,4 +2,4 @@

 *Description*

-Returns the logarithm of a number to a base.
+Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning.
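The regenerated description above now comes straight from annotations on the function class. As a rough, self-contained sketch of that pattern (the annotation shapes here are simplified assumptions for illustration, not the actual ESQL annotation API; the docs text is taken from the generated asciidoc in this patch):

```java
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

// Simplified stand-in for a documentation annotation; a generator reads
// these values and renders the description/parameters asciidoc from them.
@Retention(RetentionPolicy.RUNTIME)
@interface FunctionInfo {
    String returnType();
    String description();
}

class LogDocsSketch {
    @FunctionInfo(
        returnType = "double",
        description = "Returns the logarithm of a value to a base. The input can be any numeric value, "
            + "the return value is always a double. Logs of zero, negative numbers, and base of one "
            + "return `null` as well as a warning."
    )
    double log(Double base, double number) {
        // Optional base: when absent, the natural logarithm (base e) is used,
        // matching the parameter docs for LOG.
        return base == null ? Math.log(number) : Math.log(number) / Math.log(base);
    }
}
```

Keeping the prose next to the code it documents is the design point of the commit: the generator re-emits the asciidoc on every run, so the reference docs and `META FUNCTIONS` can no longer drift apart.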
diff --git a/docs/reference/esql/functions/description/log10.asciidoc b/docs/reference/esql/functions/description/log10.asciidoc index fd5d9ce16ee5b..2725cd93df226 100644 --- a/docs/reference/esql/functions/description/log10.asciidoc +++ b/docs/reference/esql/functions/description/log10.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the log base 10. +Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. diff --git a/docs/reference/esql/functions/examples/log.asciidoc b/docs/reference/esql/functions/examples/log.asciidoc new file mode 100644 index 0000000000000..a77aa6af5df7b --- /dev/null +++ b/docs/reference/esql/functions/examples/log.asciidoc @@ -0,0 +1,21 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=log] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=log-result] +|=== +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=logUnary] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=logUnary-result] +|=== + diff --git a/docs/reference/esql/functions/examples/log10.asciidoc b/docs/reference/esql/functions/examples/log10.asciidoc new file mode 100644 index 0000000000000..1e6cc0c67219b --- /dev/null +++ b/docs/reference/esql/functions/examples/log10.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=log10] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=log10-result] +|=== + diff --git a/docs/reference/esql/functions/layout/log.asciidoc b/docs/reference/esql/functions/layout/log.asciidoc index d5ce98c524421..2cefb5ffa551e 100644 --- a/docs/reference/esql/functions/layout/log.asciidoc +++ b/docs/reference/esql/functions/layout/log.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/log.svg[Embedded,opts=inline] include::../parameters/log.asciidoc[] include::../description/log.asciidoc[] include::../types/log.asciidoc[] +include::../examples/log.asciidoc[] diff --git a/docs/reference/esql/functions/layout/log10.asciidoc b/docs/reference/esql/functions/layout/log10.asciidoc index 3de3008e5b91a..e1f3e9f330ed6 100644 --- a/docs/reference/esql/functions/layout/log10.asciidoc +++ b/docs/reference/esql/functions/layout/log10.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/log10.svg[Embedded,opts=inline] include::../parameters/log10.asciidoc[] include::../description/log10.asciidoc[] include::../types/log10.asciidoc[] +include::../examples/log10.asciidoc[] diff --git a/docs/reference/esql/functions/log.asciidoc b/docs/reference/esql/functions/log.asciidoc deleted file mode 100644 index b1470e50f2881..0000000000000 --- a/docs/reference/esql/functions/log.asciidoc +++ /dev/null @@ -1,46 +0,0 @@ -[discrete] -[[esql-log]] -=== `LOG` - -*Syntax* - -[source,esql] ----- -LOG([base,] value) ----- - -*Parameters* - -`base`:: -Numeric expression. If `null`, the function returns `null`. The base is an optional input parameter. If a base is not provided, this function returns the natural logarithm (base e) of a value. 
- -`value`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. - -Logs of zero, negative numbers, infinites and base of one return `null` as well as a warning. - -include::types/log.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=log] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=log-result] -|=== - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=logUnary] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=logUnary-result] -|=== diff --git a/docs/reference/esql/functions/log10.asciidoc b/docs/reference/esql/functions/log10.asciidoc deleted file mode 100644 index f0c16f3e4769e..0000000000000 --- a/docs/reference/esql/functions/log10.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-log10]] -=== `LOG10` - -*Syntax* - -[.text-center] -image::esql/functions/signature/log10.svg[Embedded,opts=inline] - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the logarithm to base 10. The input can be any numeric value, the return -value is always a double. - -Logs of 0, negative numbers, and infinites return `null` as well as a warning. - -include::types/log10.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=log10] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=log10-result] -|=== diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 28830554198d2..8748b35443e8e 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -41,8 +41,8 @@ include::layout/cos.asciidoc[] include::layout/cosh.asciidoc[] include::e.asciidoc[] include::floor.asciidoc[] -include::log.asciidoc[] -include::log10.asciidoc[] +include::layout/log.asciidoc[] +include::layout/log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] include::round.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc index 3591efb47a9bd..cf6a439f68da6 100644 --- a/docs/reference/esql/functions/parameters/log.asciidoc +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -3,7 +3,7 @@ *Parameters* `base`:: - +Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value. `number`:: - +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc index 91c56709d182a..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/log10.asciidoc +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -3,4 +3,4 @@ *Parameters* `number`:: - +Numeric expression. If `null`, the function returns `null`. 
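The optional `base` parameter documented above is implemented by argument swapping in the `Log` constructor further down in this patch (`this.value = value != null ? value : base`). A hedged sketch of the two calling forms that result, with illustrative names only:

    // Sketch of LOG's two arities: LOG(value) is the natural logarithm,
    // LOG(base, value) is the logarithm of value to the given base.
    public final class OptionalBaseSketch {
        static double log(double value) {              // unary: ln(value)
            return Math.log(value);
        }

        static double log(double base, double value) { // binary: log_base(value)
            return Math.log(value) / Math.log(base);
        }

        public static void main(String[] args) {
            System.out.println(log(8));       // ~2.079, i.e. ln(8)
            System.out.println(log(10, 100)); // 2.0
        }
    }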
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index c72feaccfc622..524de7c2c3b67 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -137,8 +137,8 @@ greatest |first |"integer|long|double|boolean least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |[""] -log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |[, ] -log10 |number |"double|integer|long|unsigned_long" |[""] +log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["Base of logarithm. If `null`\, the function returns `null`. If not provided\, this function returns the natural logarithm (base e) of a value.", "Numeric expression. If `null`\, the function returns `null`."] +log10 |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. ltrim |string |"keyword|text" |[""] max |number |"double|integer|long" |[""] median |number |"double|integer|long" |[""] @@ -245,8 +245,8 @@ greatest |Returns the maximum value from many columns. least |Returns the minimum value from many columns. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. -log |Returns the logarithm of a number to a base. -log10 |Returns the log base 10. +log |Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning. +log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. ltrim |Removes leading whitespaces from a string. max |The maximum value of a numeric field. median |The value that is greater than half of all values and less than half of all values. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index f434437e12d5b..cf6cfa5525dc6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -32,11 +33,28 @@ public class Log extends EsqlScalarFunction implements OptionalArgument { private final Expression base, value; - @FunctionInfo(returnType = "double", description = "Returns the logarithm of a number to a base.") + @FunctionInfo( + returnType = "double", + description = "Returns the logarithm of a value to a base. 
The input can be any numeric value, " + + "the return value is always a double.\n" + + "\n" + + "Logs of zero, negative numbers, and base of one return `null` as well as a warning.", + examples = { @Example(file = "math", tag = "log"), @Example(file = "math", tag = "logUnary") } + ) public Log( Source source, - @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }, optional = true) Expression base, - @Param(name = "number", type = { "integer", "unsigned_long", "long", "double" }) Expression value + @Param( + name = "base", + type = { "integer", "unsigned_long", "long", "double" }, + description = "Base of logarithm. If `null`, the function returns `null`. " + + "If not provided, this function returns the natural logarithm (base e) of a value.", + optional = true + ) Expression base, + @Param( + name = "number", + type = { "integer", "unsigned_long", "long", "double" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression value ) { super(source, value != null ? Arrays.asList(base, value) : Arrays.asList(base)); this.value = value != null ? value : base; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 46df37c685cf7..ab109c8c95bd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -28,8 +29,22 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; public class Log10 extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Returns the log base 10.") - public Log10(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = "double", + description = "Returns the logarithm of a value to base 10. The input can " + + "be any numeric value, the return value is always a double.\n" + + "\n" + + "Logs of 0 and negative numbers return `null` as well as a warning.", + examples = @Example(file = "math", tag = "log10") + ) + public Log10( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." 
+ ) Expression n + ) { super(source, n); } From ea9e6a95376bd3f3af1873959fbd0dcb31ed6555 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:48:31 -0400 Subject: [PATCH 087/264] Clarify closed index setting warning (#106888) --- docs/reference/index-modules.asciidoc | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 31fe747feb63b..1b6914e946c82 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -7,7 +7,7 @@ an index. [discrete] [[index-modules-settings]] -== Index Settings +== Index settings [[index-modules-settings-description]] // tag::index-modules-settings-description-tag[] @@ -27,9 +27,7 @@ They can be changed on a live index using the <> API. // end::index-modules-settings-description-tag[] -WARNING: Changing static or dynamic index settings on a closed index could -result in incorrect settings that are impossible to rectify without deleting -and recreating the index. +CAUTION: You can change any documented index settings on closed indices. However, changing undocumented index settings on closed indices is unsupported and might result in errors. [discrete] === Static index settings From 206a0b7a4cf1236e01556fe2c5aa44060d3a08a6 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:54:53 -0400 Subject: [PATCH 088/264] [DOCS] Remove obsolete accounting circuit breakers (#107015) --- .../modules/indices/circuit_breaker.asciidoc | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/docs/reference/modules/indices/circuit_breaker.asciidoc b/docs/reference/modules/indices/circuit_breaker.asciidoc index caa6c43d45a9a..a5a787e23d170 100644 --- a/docs/reference/modules/indices/circuit_breaker.asciidoc +++ b/docs/reference/modules/indices/circuit_breaker.asciidoc @@ -97,24 +97,6 @@ also as a structured object which is reflected by default overhead. A constant that all in flight requests estimations are multiplied with to determine a final estimation. Defaults to 2. -[[accounting-circuit-breaker]] -[discrete] -==== Accounting requests circuit breaker - -The accounting circuit breaker allows Elasticsearch to limit the memory -usage of things held in memory that are not released when a request is -completed. This includes things like the Lucene segment memory. - -`indices.breaker.accounting.limit`:: - (<>) - Limit for accounting breaker, defaults to 100% of JVM heap. This means that it is bound - by the limit configured for the parent circuit breaker. - -`indices.breaker.accounting.overhead`:: - (<>) - A constant that all accounting estimations are multiplied with to determine a - final estimation. Defaults to 1 - [[script-compilation-circuit-breaker]] [discrete] ==== Script compilation circuit breaker From 333c6a6e6d285144c5359e7c91579ab63dc82dcb Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Wed, 3 Apr 2024 15:58:57 +0200 Subject: [PATCH 089/264] Replace UnsupportedOperationException with IllegalArgumentException (#107038) This makes sure the REST status we return is a 4xx instead of a 5xx, which means the error no longer maps to "Service Unavailable".
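For context on the fix: the REST layer derives the HTTP status code from the exception type, and `UnsupportedOperationException` has no client-error mapping. A condensed, hypothetical sketch of the effect (the real translation lives in the server's exception handling, not in a method like this):

    // Hypothetical illustration of why the exception type changes the response:
    // IllegalArgumentException maps to a 4xx, while the old exception type
    // surfaced as "Service Unavailable", blaming the server for a bad request.
    final class RestStatusSketch {
        static int statusFor(Exception e) {
            if (e instanceof IllegalArgumentException) {
                return 400; // Bad Request: the caller used an unknown sort key
            }
            return 503; // Service Unavailable: misleading for a client error
        }

        public static void main(String[] args) {
            System.out.println(statusFor(new IllegalArgumentException("Unable to sort by unknown sort key `foo`"))); // 400
            System.out.println(statusFor(new UnsupportedOperationException()));                                      // 503
        }
    }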
--- docs/changelog/107038.yaml | 5 +++++ .../java/org/elasticsearch/rest/action/cat/RestTable.java | 4 +--- .../org/elasticsearch/rest/action/cat/RestTableTests.java | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/107038.yaml diff --git a/docs/changelog/107038.yaml b/docs/changelog/107038.yaml new file mode 100644 index 0000000000000..e00b0d45a8a3a --- /dev/null +++ b/docs/changelog/107038.yaml @@ -0,0 +1,5 @@ +pr: 107038 +summary: Replace `UnsupportedOperationException` with `IllegalArgumentException` for non-existing columns +area: Search +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java index cfe5d6d2aef39..5999d1b81da47 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTable.java @@ -156,9 +156,7 @@ static List getRowOrder(Table table, RestRequest request) { if (headerAliasMap.containsKey(columnHeader)) { ordering.add(new ColumnOrderElement(headerAliasMap.get(columnHeader), reverse)); } else { - throw new UnsupportedOperationException( - String.format(Locale.ROOT, "Unable to sort by unknown sort key `%s`", columnHeader) - ); + throw new IllegalArgumentException(String.format(Locale.ROOT, "Unable to sort by unknown sort key `%s`", columnHeader)); } } Collections.sort(rowOrder, new TableIndexComparator(table, ordering)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java index 1ec180fdaad77..dff6b52e470df 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTableTests.java @@ -200,7 +200,7 @@ public void testUnknownHeader() { table.addCell("compare"); table.endHeaders(); restRequest.params().put("s", "notaheader"); - Exception e = expectThrows(UnsupportedOperationException.class, () -> RestTable.getRowOrder(table, restRequest)); + Exception e = expectThrows(IllegalArgumentException.class, () -> RestTable.getRowOrder(table, restRequest)); assertEquals("Unable to sort by unknown sort key `notaheader`", e.getMessage()); } From 377c7e568f42cae45c569ff014db212aa45fcc94 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 3 Apr 2024 16:01:50 +0200 Subject: [PATCH 090/264] Implement time series grouping via HashAggregationOperator using a specialized block hash implementation (#106127) This change adds a new operator factory that performs grouping by the `_tsid` and `@timestamp` fields. The new operator factory delegates to the `HashAggregationOperator` to do the grouping, but makes use of a specialized block hash (which makes a few assumptions about the group fields). This is an initial implementation that is not optimized for 'local' grouping. The new time series grouping operator factory isn't usable from the API and is only usable from tests.
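One of the assumptions the specialized block hash makes is worth spelling out: input pages arrive sorted by tsid hash and timestamp interval, so a group ordinal only has to be resolved when the key changes between consecutive rows. A simplified sketch of that fast path using plain collections (hypothetical class; the real implementation below works on blocks via `BytesRefHash` and `LongLongHash`):

    import java.util.HashMap;
    import java.util.Map;

    // Simplified take on the consecutive-key optimization: sorted input means
    // equal (tsid, interval) keys form runs, so the map is probed only when
    // the current key differs from the previous row's key.
    class SortedGroupOrdinals {
        private final Map<String, Integer> ordinals = new HashMap<>();
        private String previousKey;
        private int previousOrdinal = -1;

        int ordinalFor(String tsidHash, long interval) {
            String key = tsidHash + "|" + interval;
            if (key.equals(previousKey) == false) {
                previousOrdinal = ordinals.computeIfAbsent(key, k -> ordinals.size());
                previousKey = key;
            }
            return previousOrdinal;
        }

        public static void main(String[] args) {
            SortedGroupOrdinals groups = new SortedGroupOrdinals();
            System.out.println(groups.ordinalFor("tsid-a", 0));      // 0
            System.out.println(groups.ordinalFor("tsid-a", 0));      // 0, fast path
            System.out.println(groups.ordinalFor("tsid-a", 60_000)); // 1
            System.out.println(groups.ordinalFor("tsid-b", 0));      // 2
        }
    }

The `TimeSeriesBlockHash` added in this change applies the same idea by remembering the previous tsid hash and timestamp interval and only touching the backing hashes when either changes.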
--- .../aggregation/blockhash/BlockHash.java | 2 +- .../blockhash/TimeSeriesBlockHash.java | 130 ++++++ .../compute/lucene/LuceneSliceQueue.java | 4 + ...TimeSeriesSortedSourceOperatorFactory.java | 98 +++-- .../TimeSeriesAggregationOperatorFactory.java | 48 +++ .../TimeSeriesSortedSourceOperatorTests.java | 261 +++-------- .../ValuesSourceReaderOperatorTests.java | 2 +- .../TimeSeriesAggregationOperatorTests.java | 404 ++++++++++++++++++ .../plan/physical/EsTimeseriesQueryExec.java | 4 +- .../planner/EsPhysicalOperationProviders.java | 2 + 10 files changed, 722 insertions(+), 233 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index 93cd3a6b9326a..b43e2ac767b0b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -33,7 +33,7 @@ */ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash,// - NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash { + NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash, TimeSeriesBlockHash { protected final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java new file mode 100644 index 0000000000000..a3d2bcae73df9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/TimeSeriesBlockHash.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +import java.util.Objects; + +public final class TimeSeriesBlockHash extends BlockHash { + + private final int tsHashChannel; + private final int timestampIntervalChannel; + private final BytesRefHash tsidHashes; + private final LongLongHash intervalHash; + + long groupOrdinal = -1; + BytesRef previousTsidHash; + long previousTimestampInterval; + + public TimeSeriesBlockHash(int tsHashChannel, int timestampIntervalChannel, DriverContext driverContext) { + super(driverContext.blockFactory()); + this.tsHashChannel = tsHashChannel; + this.timestampIntervalChannel = timestampIntervalChannel; + this.tsidHashes = new BytesRefHash(1, blockFactory.bigArrays()); + this.intervalHash = new LongLongHash(1, blockFactory.bigArrays()); + } + + @Override + public void close() { + Releasables.close(tsidHashes, intervalHash); + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + BytesRefBlock tsHashBlock = page.getBlock(tsHashChannel); + BytesRefVector tsHashVector = Objects.requireNonNull(tsHashBlock.asVector()); + try (var ordsBuilder = blockFactory.newIntVectorBuilder(tsHashVector.getPositionCount())) { + LongBlock timestampIntervalBlock = page.getBlock(timestampIntervalChannel); + BytesRef spare = new BytesRef(); + for (int i = 0; i < tsHashVector.getPositionCount(); i++) { + BytesRef tsHash = tsHashVector.getBytesRef(i, spare); + long timestampInterval = timestampIntervalBlock.getLong(i); + // Optimization that relies on the fact that blocks are sorted by tsid hash and timestamp + if (tsHash.equals(previousTsidHash) == false || timestampInterval != previousTimestampInterval) { + long tsidOrdinal = tsidHashes.add(tsHash); + if (tsidOrdinal < 0) { + tsidOrdinal = -1 - tsidOrdinal; + } + groupOrdinal = intervalHash.add(tsidOrdinal, timestampInterval); + if (groupOrdinal < 0) { + groupOrdinal = -1 - groupOrdinal; + } + previousTsidHash = BytesRef.deepCopyOf(tsHash); + previousTimestampInterval = timestampInterval; + } + ordsBuilder.appendInt(Math.toIntExact(groupOrdinal)); + } + try (var ords = ordsBuilder.build()) { + addInput.add(0, ords); + } + } + } + + @Override + public Block[] getKeys() { + int positions = (int) intervalHash.size(); + BytesRefVector tsidHashes = null; + LongVector timestampIntervals = null; + try ( + BytesRefVector.Builder tsidHashesBuilder = blockFactory.newBytesRefVectorBuilder(positions); + LongVector.Builder timestampIntervalsBuilder = blockFactory.newLongVectorFixedBuilder(positions) + ) { + BytesRef scratch = new BytesRef(); + for (long i = 0; i < positions; i++) { + BytesRef key1 = this.tsidHashes.get(intervalHash.getKey1(i), scratch); + 
tsidHashesBuilder.appendBytesRef(key1); + timestampIntervalsBuilder.appendLong(intervalHash.getKey2(i)); + } + tsidHashes = tsidHashesBuilder.build(); + timestampIntervals = timestampIntervalsBuilder.build(); + } finally { + if (timestampIntervals == null) { + Releasables.closeExpectNoException(tsidHashes); + } + } + return new Block[] { tsidHashes.asBlock(), timestampIntervals.asBlock() }; + } + + @Override + public IntVector nonEmpty() { + long endExclusive = intervalHash.size(); + return IntVector.range(0, Math.toIntExact(endExclusive), blockFactory); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + long size = intervalHash.size(); + return new SeenGroupIds.Range(0, Math.toIntExact(size)).seenGroupIds(bigArrays); + } + + public String toString() { + return "TimeSeriesBlockHash{keys=[BytesRefKey[channel=" + + tsHashChannel + + "], LongKey[channel=" + + timestampIntervalChannel + + "]], entries=" + + groupOrdinal + + "b}"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index f367499b12902..f3bcdc7593dab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -46,6 +46,10 @@ public int totalSlices() { return totalSlices; } + public Iterable getSlices() { + return slices; + } + public static LuceneSliceQueue create( List contexts, Function weightFunction, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index 855066fcb9da5..58f2c8de67b61 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.lucene; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; @@ -16,7 +17,9 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.Rounding; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -25,6 +28,8 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import java.io.IOException; import java.io.UncheckedIOException; @@ -43,13 +48,18 @@ * This operator currently only supports shard level concurrency. A new concurrency mechanism should be introduced at the time serie level * in order to read tsdb indices in parallel. 
*/ -public record TimeSeriesSortedSourceOperatorFactory(int limit, int maxPageSize, int taskConcurrency, LuceneSliceQueue sliceQueue) - implements - LuceneOperator.Factory { +public record TimeSeriesSortedSourceOperatorFactory( + int limit, + int maxPageSize, + int taskConcurrency, + TimeValue timeSeriesPeriod, + LuceneSliceQueue sliceQueue +) implements LuceneOperator.Factory { @Override public SourceOperator get(DriverContext driverContext) { - return new Impl(driverContext.blockFactory(), sliceQueue, maxPageSize, limit); + var rounding = timeSeriesPeriod.equals(TimeValue.ZERO) == false ? Rounding.builder(timeSeriesPeriod).build() : null; + return new Impl(driverContext.blockFactory(), sliceQueue, maxPageSize, limit, rounding); } @Override @@ -66,13 +76,14 @@ public static TimeSeriesSortedSourceOperatorFactory create( int limit, int maxPageSize, int taskConcurrency, + TimeValue timeSeriesPeriod, List searchContexts, Function queryFunction ) { var weightFunction = LuceneOperator.weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); var sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, DataPartitioning.SHARD, taskConcurrency); taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); - return new TimeSeriesSortedSourceOperatorFactory(limit, maxPageSize, taskConcurrency, sliceQueue); + return new TimeSeriesSortedSourceOperatorFactory(limit, maxPageSize, taskConcurrency, timeSeriesPeriod, sliceQueue); } static final class Impl extends SourceOperator { @@ -80,26 +91,49 @@ static final class Impl extends SourceOperator { private final int maxPageSize; private final BlockFactory blockFactory; private final LuceneSliceQueue sliceQueue; + private final Rounding.Prepared rounding; private int currentPagePos = 0; private int remainingDocs; private boolean doneCollecting; private IntVector.Builder docsBuilder; private IntVector.Builder segmentsBuilder; - private LongVector.Builder timestampIntervalBuilder; - // TODO: handle when a time series spans across backing indices - // In that case we need to bytes representation of the tsid - private IntVector.Builder tsOrdBuilder; + private LongVector.Builder timestampsBuilder; + private LongVector.Builder intervalsBuilder; + // TODO: add an ordinal block for tsid hashes + // (This allows for efficiently grouping by tsid locally, no need to use bytes representation of tsid hash) + private BytesRefVector.Builder tsHashesBuilder; private TimeSeriesIterator iterator; - Impl(BlockFactory blockFactory, LuceneSliceQueue sliceQueue, int maxPageSize, int limit) { + Impl(BlockFactory blockFactory, LuceneSliceQueue sliceQueue, int maxPageSize, int limit, Rounding rounding) { this.maxPageSize = maxPageSize; this.blockFactory = blockFactory; this.remainingDocs = limit; this.docsBuilder = blockFactory.newIntVectorBuilder(Math.min(limit, maxPageSize)); this.segmentsBuilder = null; - this.timestampIntervalBuilder = blockFactory.newLongVectorBuilder(Math.min(limit, maxPageSize)); - this.tsOrdBuilder = blockFactory.newIntVectorBuilder(Math.min(limit, maxPageSize)); + this.timestampsBuilder = blockFactory.newLongVectorBuilder(Math.min(limit, maxPageSize)); + this.tsHashesBuilder = blockFactory.newBytesRefVectorBuilder(Math.min(limit, maxPageSize)); this.sliceQueue = sliceQueue; + if (rounding != null) { + try { + long minTimestamp = Long.MAX_VALUE; + long maxTimestamp = Long.MIN_VALUE; + for (var slice : sliceQueue.getSlices()) { + for (var leaf : slice.leaves()) { + var pointValues = 
leaf.leafReaderContext().reader().getPointValues(DataStreamTimestampFieldMapper.DEFAULT_PATH); + long segmentMin = LongPoint.decodeDimension(pointValues.getMinPackedValue(), 0); + minTimestamp = Math.min(segmentMin, minTimestamp); + long segmentMax = LongPoint.decodeDimension(pointValues.getMaxPackedValue(), 0); + maxTimestamp = Math.max(segmentMax, maxTimestamp); + } + } + this.rounding = rounding.prepare(minTimestamp, maxTimestamp); + this.intervalsBuilder = blockFactory.newLongVectorBuilder(Math.min(limit, maxPageSize)); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } else { + this.rounding = null; + } } @Override @@ -127,8 +161,9 @@ public Page getOutput() { IntBlock shard = null; IntVector leaf = null; IntVector docs = null; - LongVector timestampIntervals = null; - IntVector tsids = null; + LongVector timestamps = null; + LongVector intervals = null; + BytesRefVector tsids = null; try { if (iterator == null) { var slice = sliceQueue.nextSlice(); @@ -154,15 +189,22 @@ public Page getOutput() { docs = docsBuilder.build(); docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); - timestampIntervals = timestampIntervalBuilder.build(); - timestampIntervalBuilder = blockFactory.newLongVectorBuilder(Math.min(remainingDocs, maxPageSize)); - tsids = tsOrdBuilder.build(); - tsOrdBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); + timestamps = timestampsBuilder.build(); + timestampsBuilder = blockFactory.newLongVectorBuilder(Math.min(remainingDocs, maxPageSize)); + if (rounding != null) { + intervals = intervalsBuilder.build(); + intervalsBuilder = blockFactory.newLongVectorBuilder(Math.min(remainingDocs, maxPageSize)); + } else { + intervals = blockFactory.newConstantLongVector(0, timestamps.getPositionCount()); + } + tsids = tsHashesBuilder.build(); + tsHashesBuilder = blockFactory.newBytesRefVectorBuilder(Math.min(remainingDocs, maxPageSize)); page = new Page( currentPagePos, new DocVector(shard.asVector(), leaf, docs, leaf.isConstant()).asBlock(), tsids.asBlock(), - timestampIntervals.asBlock() + timestamps.asBlock(), + intervals.asBlock() ); currentPagePos = 0; @@ -173,7 +215,7 @@ public Page getOutput() { throw new UncheckedIOException(e); } finally { if (page == null) { - Releasables.closeExpectNoException(shard, leaf, docs, timestampIntervals, tsids); + Releasables.closeExpectNoException(shard, leaf, docs, timestamps, tsids, intervals); } } return page; @@ -181,7 +223,7 @@ public Page getOutput() { @Override public void close() { - Releasables.closeExpectNoException(docsBuilder, segmentsBuilder, timestampIntervalBuilder, tsOrdBuilder); + Releasables.closeExpectNoException(docsBuilder, segmentsBuilder, timestampsBuilder, intervalsBuilder, tsHashesBuilder); } class TimeSeriesIterator { @@ -236,8 +278,11 @@ void consume() throws IOException { Leaf leaf = queue.top(); segmentsBuilder.appendInt(leaf.segmentOrd); docsBuilder.appendInt(leaf.iterator.docID()); - timestampIntervalBuilder.appendLong(leaf.timestamp); - tsOrdBuilder.appendInt(globalTsidOrd); + timestampsBuilder.appendLong(leaf.timestamp); + if (rounding != null) { + intervalsBuilder.appendLong(rounding.round(leaf.timestamp)); + } + tsHashesBuilder.appendBytesRef(currentTsid); final Leaf newTop; if (leaf.nextDoc()) { // TODO: updating the top is one of the most expensive parts of this operation. @@ -257,8 +302,11 @@ void consume() throws IOException { // Only one segment, so no need to use priority queue and use segment ordinals as tsid ord. 
leaf.reinitializeIfNeeded(Thread.currentThread()); while (leaf.nextDoc()) { - tsOrdBuilder.appendInt(leaf.timeSeriesHashOrd); - timestampIntervalBuilder.appendLong(leaf.timestamp); + tsHashesBuilder.appendBytesRef(leaf.timeSeriesHash); + timestampsBuilder.appendLong(leaf.timestamp); + if (rounding != null) { + intervalsBuilder.appendLong(rounding.round(leaf.timestamp)); + } // Don't append segment ord, because there is only one segment. docsBuilder.appendInt(leaf.iterator.docID()); currentPagePos++; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java new file mode 100644 index 0000000000000..0cf0854a9b0c7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.core.TimeValue; + +import java.util.List; + +public record TimeSeriesAggregationOperatorFactory( + AggregatorMode mode, + int tsHashChannel, + int timestampIntervalChannel, + TimeValue timeSeriesPeriod, + List aggregators, + int maxPageSize +) implements Operator.OperatorFactory { + + @Override + public String describe() { + return "TimeSeriesAggregationOperator[mode=" + + mode + + ", tsHashChannel = " + + tsHashChannel + + ", timestampIntervalChannel = " + + timestampIntervalChannel + + ", timeSeriesPeriod = " + + timeSeriesPeriod + + ", maxPageSize = " + + maxPageSize + + "]"; + } + + @Override + public Operator get(DriverContext driverContext) { + BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); + return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); + } + +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index b397d36837d01..ab050bcb03c7d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -26,34 +26,24 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; -import org.elasticsearch.compute.data.DoubleBlock; import 
org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.junit.After; @@ -65,9 +55,9 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -91,12 +81,12 @@ public void testSimple() { // for now we emit at most one time series each page int offset = 0; for (Page page : results) { - assertThat(page.getBlockCount(), equalTo(5)); + assertThat(page.getBlockCount(), equalTo(6)); DocVector docVector = (DocVector) page.getBlock(0).asVector(); - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); + BytesRefVector tsidVector = (BytesRefVector) page.getBlock(1).asVector(); LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); + LongVector voltageVector = (LongVector) page.getBlock(4).asVector(); + BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(5).asVector(); for (int i = 0; i < page.getPositionCount(); i++) { int expectedTsidOrd = offset / numSamplesPerTS; String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); @@ -106,7 +96,7 @@ public void testSimple() { assertThat(docVector.shards().getInt(i), equalTo(0)); assertThat(voltageVector.getLong(i), equalTo(expectedVoltage)); assertThat(hostnameVector.getBytesRef(i, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); - assertThat(tsidVector.getInt(i), equalTo(expectedTsidOrd)); + assertThat(tsidVector.getBytesRef(i, new BytesRef()).utf8ToString(), equalTo("\u0001\bhostnames\u0007" + expectedHostname)); assertThat(timestampVector.getLong(i), equalTo(expectedTimestamp)); offset++; } @@ -121,27 +111,27 @@ public void testLimit() { List results = runDriver(limit, randomIntBetween(1, 1024), randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); assertThat(results, hasSize(1)); Page page = results.get(0); - assertThat(page.getBlockCount(), equalTo(5)); + assertThat(page.getBlockCount(), equalTo(6)); DocVector docVector = (DocVector) 
page.getBlock(0).asVector(); assertThat(docVector.getPositionCount(), equalTo(limit)); - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); + BytesRefVector tsidVector = (BytesRefVector) page.getBlock(1).asVector(); assertThat(tsidVector.getPositionCount(), equalTo(limit)); LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); assertThat(timestampVector.getPositionCount(), equalTo(limit)); - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); + LongVector voltageVector = (LongVector) page.getBlock(4).asVector(); assertThat(voltageVector.getPositionCount(), equalTo(limit)); - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); + BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(5).asVector(); assertThat(hostnameVector.getPositionCount(), equalTo(limit)); assertThat(docVector.shards().getInt(0), equalTo(0)); assertThat(voltageVector.getLong(0), equalTo(5L)); assertThat(hostnameVector.getBytesRef(0, new BytesRef()).utf8ToString(), equalTo("host-00")); - assertThat(tsidVector.getInt(0), equalTo(0)); + assertThat(tsidVector.getBytesRef(0, new BytesRef()).utf8ToString(), equalTo("\u0001\bhostnames\u0007host-00")); // legacy tsid assertThat(timestampVector.getLong(0), equalTo(timestampStart + ((numSamplesPerTS - 1) * 10_000L))); } @@ -161,13 +151,21 @@ record Doc(int host, long timestamp, long metric) {} } int maxPageSize = between(1, 1024); int limit = randomBoolean() ? between(1, 100000) : Integer.MAX_VALUE; - var timeSeriesFactory = createTimeSeriesSourceOperator(limit, maxPageSize, randomBoolean(), writer -> { - Randomness.shuffle(docs); - for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "hostname", "h" + doc.host }, new Object[] { "metric", doc.metric }); + var timeSeriesFactory = createTimeSeriesSourceOperator( + directory, + r -> this.reader = r, + limit, + maxPageSize, + randomBoolean(), + TimeValue.ZERO, + writer -> { + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, new Object[] { "hostname", "h" + doc.host }, new Object[] { "metric", doc.metric }); + } + return docs.size(); } - return docs.size(); - }); + ); DriverContext driverContext = driverContext(); List results = new ArrayList<>(); var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); @@ -192,16 +190,16 @@ record Doc(int host, long timestamp, long metric) {} assertThat(page.getPositionCount(), lessThanOrEqualTo(limit)); assertThat(page.getPositionCount(), lessThanOrEqualTo(maxPageSize)); } - assertThat(page.getBlockCount(), equalTo(4)); + assertThat(page.getBlockCount(), equalTo(5)); DocVector docVector = (DocVector) page.getBlock(0).asVector(); - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); + BytesRefVector tsidVector = (BytesRefVector) page.getBlock(1).asVector(); LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - LongVector metricVector = (LongVector) page.getBlock(3).asVector(); + LongVector metricVector = (LongVector) page.getBlock(4).asVector(); for (int i = 0; i < page.getPositionCount(); i++) { Doc doc = docs.get(offset); offset++; assertThat(docVector.shards().getInt(0), equalTo(0)); - assertThat(tsidVector.getInt(i), equalTo(hostToTsidOrd.get(doc.host))); + assertThat(tsidVector.getBytesRef(i, new BytesRef()).utf8ToString(), equalTo("\u0001\bhostnames\u0002h" + doc.host)); assertThat(timestampVector.getLong(i), equalTo(doc.timestamp)); assertThat(metricVector.getLong(i), 
equalTo(doc.metric)); } @@ -209,169 +207,9 @@ record Doc(int host, long timestamp, long metric) {} assertThat(offset, equalTo(Math.min(limit, numDocs))); } - public void testBasicRate() { - long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; - long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; - - long[] v2 = { 7, 2, 0, 11, 24, 0, 4, 1, 10, 2 }; - long[] t2 = { 1, 2, 4, 5, 6, 8, 10, 11, 12, 14 }; - - long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; - long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit)); - assertThat(actualRates, equalTo(Map.of("p1", 35.0 * unit / 111.0, "p2", 42.0 * unit / 13.0, "p3", 10.0 * unit / 20.0))); - } - - public void testRandomRate() { - int numPods = between(1, 10); - List pods = new ArrayList<>(); - Map expectedRates = new HashMap<>(); - TimeValue unit = TimeValue.timeValueSeconds(1); - for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; - } - Pod pod = new Pod("p" + p, times, values); - pods.add(pod); - if (numValues == 1) { - expectedRates.put(pod.name, null); - } else { - expectedRates.put(pod.name, pod.expectedRate(unit)); - } - } - Map actualRates = runRateTest(pods, unit); - assertThat(actualRates, equalTo(expectedRates)); - } - - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - - double expectedRate(TimeValue unit) { - double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; - } - } - dv += (values[values.length - 1] - values[0]); - long dt = times[times.length - 1] - times[0]; - return (dv * unit.millis()) / dt; - } - } - - Map runRateTest(List pods, TimeValue unit) { - long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { - - } - var sourceOperatorFactory = createTimeSeriesSourceOperator(Integer.MAX_VALUE, between(1, 100), randomBoolean(), writer -> { - List docs = new ArrayList<>(); - for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); - } - } - Randomness.shuffle(docs); - for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); - } - return docs.size(); - }); - var ctx = driverContext(); - HashAggregationOperator finalHash = new HashAggregationOperator( - List.of(new RateLongAggregatorFunctionSupplier(List.of(1, 2, 3), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL)), - () -> BlockHash.build( - List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), - ctx.blockFactory(), - randomIntBetween(1, 1000), - randomBoolean() - ), - ctx - ); - List results = new ArrayList<>(); - var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); - var podField = new KeywordFieldMapper.KeywordFieldType("pod"); - if (randomBoolean()) { - HashAggregationOperator initialHash = new HashAggregationOperator( - 
List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ), - () -> BlockHash.build( - List.of(new HashAggregationOperator.GroupSpec(3, ElementType.BYTES_REF)), - ctx.blockFactory(), - randomIntBetween(1, 1000), - randomBoolean() - ), - ctx - ); - OperatorTestCase.runDriver( - new Driver( - ctx, - sourceOperatorFactory.get(ctx), - List.of( - ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), - ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), - initialHash, - finalHash - ), - new TestResultPageSinkOperator(results::add), - () -> {} - ) - ); - } else { - var blockLoader = new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader("pod"); - var shardContext = new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE); - var ordinalGrouping = new OrdinalsGroupingOperator( - shardIdx -> blockLoader, - List.of(shardContext), - ElementType.BYTES_REF, - 0, - "pod", - List.of( - new RateLongAggregatorFunctionSupplier(List.of(3, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ), - randomIntBetween(1, 1000), - ctx - ); - OperatorTestCase.runDriver( - new Driver( - ctx, - sourceOperatorFactory.get(ctx), - List.of( - ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), - ordinalGrouping, - finalHash - ), - new TestResultPageSinkOperator(results::add), - () -> {} - ) - ); - } - Map rates = new HashMap<>(); - for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - DoubleBlock ratesBlock = result.getBlock(1); - for (int i = 0; i < result.getPositionCount(); i++) { - rates.put(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), ratesBlock.getDouble(i)); - } - result.releaseBlocks(); - } - return rates; - } - @Override protected Operator.OperatorFactory simple() { - return createTimeSeriesSourceOperator(1, 1, false, writer -> { + return createTimeSeriesSourceOperator(directory, r -> this.reader = r, 1, 1, false, TimeValue.ZERO, writer -> { long timestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); writeTS(writer, timestamp, new Object[] { "hostname", "host-01" }, new Object[] { "voltage", 2 }); return 1; @@ -390,18 +228,26 @@ protected String expectedToStringOfSimple() { List runDriver(int limit, int maxPageSize, boolean forceMerge, int numTimeSeries, int numSamplesPerTS, long timestampStart) { var ctx = driverContext(); - var timeSeriesFactory = createTimeSeriesSourceOperator(limit, maxPageSize, forceMerge, writer -> { - long timestamp = timestampStart; - for (int i = 0; i < numSamplesPerTS; i++) { - for (int j = 0; j < numTimeSeries; j++) { - String hostname = String.format(Locale.ROOT, "host-%02d", j); - writeTS(writer, timestamp, new Object[] { "hostname", hostname }, new Object[] { "voltage", j + 5 }); + var timeSeriesFactory = createTimeSeriesSourceOperator( + directory, + indexReader -> this.reader = indexReader, + limit, + maxPageSize, + forceMerge, + TimeValue.ZERO, + writer -> { + long timestamp = timestampStart; + for (int i = 0; i < numSamplesPerTS; i++) { + for (int j = 0; j < numTimeSeries; j++) { + String hostname = String.format(Locale.ROOT, "host-%02d", j); + writeTS(writer, timestamp, new Object[] { "hostname", hostname }, new Object[] { "voltage", j + 5 }); + } + timestamp += 10_000; + writer.commit(); } - timestamp += 10_000; - writer.commit(); + return numTimeSeries * 
numSamplesPerTS; } - return numTimeSeries * numSamplesPerTS; - }); + ); List results = new ArrayList<>(); var voltageField = new NumberFieldMapper.NumberFieldType("voltage", NumberFieldMapper.NumberType.LONG); @@ -426,16 +272,20 @@ List runDriver(int limit, int maxPageSize, boolean forceMerge, int numTime return results; } - TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( + public static TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( + Directory directory, + Consumer readerConsumer, int limit, int maxPageSize, boolean forceMerge, + TimeValue timeValue, CheckedFunction indexingLogic ) { Sort sort = new Sort( new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false), new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true) ); + IndexReader reader; try ( RandomIndexWriter writer = new RandomIndexWriter( random(), @@ -449,16 +299,17 @@ TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( writer.forceMerge(1); } reader = writer.getReader(); + readerConsumer.accept(reader); assertThat(reader.numDocs(), equalTo(numDocs)); } catch (IOException e) { throw new UncheckedIOException(e); } var ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); Function queryFunction = c -> new MatchAllDocsQuery(); - return TimeSeriesSortedSourceOperatorFactory.create(limit, maxPageSize, 1, List.of(ctx), queryFunction); + return TimeSeriesSortedSourceOperatorFactory.create(limit, maxPageSize, 1, timeValue, List.of(ctx), queryFunction); } - static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { + public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { final List fields = new ArrayList<>(); fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp)); fields.add(new LongPoint(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 1ba9fa5d1d354..2402e6f656db0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -133,7 +133,7 @@ protected Operator.OperatorFactory simple() { return factory(reader, mapperService.fieldType("long"), ElementType.LONG); } - static Operator.OperatorFactory factory(IndexReader reader, MappedFieldType ft, ElementType elementType) { + public static Operator.OperatorFactory factory(IndexReader reader, MappedFieldType ft, ElementType elementType) { return factory(reader, ft.name(), elementType, ft.blockLoader(null)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java new file mode 100644 index 0000000000000..15c2cb1c57218 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -0,0 +1,404 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.SourceLoader; +import org.junit.After; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; +import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; +import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.equalTo; + +public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { + + private IndexReader reader; + private final Directory directory = newDirectory(); + + @After + public void cleanup() throws IOException { + IOUtils.close(reader, directory); + } + + @Override + protected Operator.OperatorFactory simple() { + return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + } + + @Override + protected String expectedDescriptionOfSimple() { + return "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " + + "timeSeriesPeriod = 0s, maxPageSize = 100]"; + } + + @Override + protected String expectedToStringOfSimple() { + return "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " + + "LongKey[channel=1]], entries=-1b}, aggregators=[]]"; + } + + public void testBasicRate() { + long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; + long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; + + long[] v2 = { 7, 2, 0, 11, 24, 0, 4, 1, 10, 2 }; + long[] t2 = { 1, 2, 4, 5, 6, 8, 10, 11, 12, 14 }; + + long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; + long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; + List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); + long unit = between(1, 5); + Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); + assertThat( + actualRates, + equalTo( + Map.of( + new Group("\u0001\u0003pods\u0002p1", 0), + 35.0 * unit / 111.0, + new 
Group("\u0001\u0003pods\u0002p2", 0), + 42.0 * unit / 13.0, + new Group("\u0001\u0003pods\u0002p3", 0), + 10.0 * unit / 20.0 + ) + ) + ); + } + + public void testRateWithInterval() { + long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; + long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; + + long[] v2 = { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; + long[] t2 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; + + long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; + long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; + List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); + Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); + assertMap( + actualRates, + matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) + .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) + .entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) + .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) + .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) + .entry(new Group("\u0001\u0003pods\u0002p2", 0), 0.0D) + .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) + .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) + .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + ); + } + + public void testRandomRate() { + int numPods = between(1, 10); + List pods = new ArrayList<>(); + Map expectedRates = new HashMap<>(); + TimeValue unit = TimeValue.timeValueSeconds(1); + for (int p = 0; p < numPods; p++) { + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + t += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = t; + } + Pod pod = new Pod("p" + p, times, values); + pods.add(pod); + if (numValues == 1) { + expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); + } else { + expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); + } + } + Map actualRates = runRateTest(pods, unit, TimeValue.ZERO); + assertThat(actualRates, equalTo(expectedRates)); + } + + record Pod(String name, long[] times, long[] values) { + Pod { + assert times.length == values.length : times.length + "!=" + values.length; + } + + double expectedRate(TimeValue unit) { + double dv = 0; + for (int i = 0; i < values.length - 1; i++) { + if (values[i + 1] < values[i]) { + dv += values[i]; + } + } + dv += (values[values.length - 1] - values[0]); + long dt = times[times.length - 1] - times[0]; + return (dv * unit.millis()) / dt; + } + } + + Map runRateTest(List pods, TimeValue unit, TimeValue interval) { + long unitInMillis = unit.millis(); + record Doc(String pod, long timestamp, long requests) { + + } + var sourceOperatorFactory = createTimeSeriesSourceOperator( + directory, + r -> this.reader = r, + Integer.MAX_VALUE, + between(1, 100), + randomBoolean(), + interval, + writer -> { + List docs = new ArrayList<>(); + for (Pod pod : pods) { + for (int i = 0; i < pod.times.length; i++) { + docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + } + } + Randomness.shuffle(docs); + for (Doc doc : docs) { + 
writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + } + return docs.size(); + } + ); + var ctx = driverContext(); + + var aggregators = List.of( + new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ); + Operator initialHash = new TimeSeriesAggregationOperatorFactory( + AggregatorMode.INITIAL, + 1, + 3, + interval, + aggregators, + randomIntBetween(1, 1000) + ).get(ctx); + + aggregators = List.of( + new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) + ); + Operator finalHash = new TimeSeriesAggregationOperatorFactory( + AggregatorMode.FINAL, + 0, + 1, + interval, + aggregators, + randomIntBetween(1, 1000) + ).get(ctx); + List results = new ArrayList<>(); + var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + Map rates = new HashMap<>(); + for (Page result : results) { + BytesRefBlock keysBlock = result.getBlock(0); + LongBlock timestampIntervalsBock = result.getBlock(1); + DoubleBlock ratesBlock = result.getBlock(2); + for (int i = 0; i < result.getPositionCount(); i++) { + var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); + rates.put(key, ratesBlock.getDouble(i)); + } + result.releaseBlocks(); + } + return rates; + } + + record Group(String tsidHash, long timestampInterval) {} + + // TODO: in a follow up add support for ordinal based time series grouping operator + // (and then remove this test) + // (ordinal based can only group by one field and never includes timestamp) + public void testBasicRateOrdinalBased() { + long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; + long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; + + long[] v2 = { 7, 2, 0, 11, 24, 0, 4, 1, 10, 2 }; + long[] t2 = { 1, 2, 4, 5, 6, 8, 10, 11, 12, 14 }; + + long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; + long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; + List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); + long unit = between(1, 5); + Map actualRates = runRateTestOrdinalBased(pods, TimeValue.timeValueMillis(unit)); + assertThat(actualRates, equalTo(Map.of("p1", 35.0 * unit / 111.0, "p2", 42.0 * unit / 13.0, "p3", 10.0 * unit / 20.0))); + } + + // TODO: in a follow up add support for ordinal based time series grouping operator + // (and then remove this test) + // (ordinal based can only group by one field and never includes timestamp) + public void testRandomRateOrdinalBased() { + int numPods = between(1, 10); + List pods = new ArrayList<>(); + Map expectedRates = new HashMap<>(); + TimeValue unit = TimeValue.timeValueSeconds(1); + for (int p = 0; p < numPods; p++) { + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + t += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = t; + } + Pod pod = new Pod("p" + p, times, values); + 
pods.add(pod); + if (numValues == 1) { + expectedRates.put(pod.name, null); + } else { + expectedRates.put(pod.name, pod.expectedRate(unit)); + } + } + Map actualRates = runRateTestOrdinalBased(pods, unit); + assertThat(actualRates, equalTo(expectedRates)); + } + + Map runRateTestOrdinalBased(List pods, TimeValue unit) { + long unitInMillis = unit.millis(); + record Doc(String pod, long timestamp, long requests) { + + } + var sourceOperatorFactory = createTimeSeriesSourceOperator( + directory, + r -> this.reader = r, + Integer.MAX_VALUE, + between(1, 100), + randomBoolean(), + TimeValue.ZERO, + writer -> { + List docs = new ArrayList<>(); + for (Pod pod : pods) { + for (int i = 0; i < pod.times.length; i++) { + docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + } + } + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + } + return docs.size(); + } + ); + var ctx = driverContext(); + HashAggregationOperator finalHash = new HashAggregationOperator( + List.of(new RateLongAggregatorFunctionSupplier(List.of(1, 2, 3), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL)), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + List results = new ArrayList<>(); + var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + var podField = new KeywordFieldMapper.KeywordFieldType("pod"); + if (randomBoolean()) { + HashAggregationOperator initialHash = new HashAggregationOperator( + List.of( + new RateLongAggregatorFunctionSupplier(List.of(5, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(4, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + initialHash, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } else { + var blockLoader = new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader("pod"); + var shardContext = new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE); + var ordinalGrouping = new OrdinalsGroupingOperator( + shardIdx -> blockLoader, + List.of(shardContext), + ElementType.BYTES_REF, + 0, + "pod", + List.of( + new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + randomIntBetween(1, 1000), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + ordinalGrouping, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } + Map rates = new HashMap<>(); + for (Page result : results) { + BytesRefBlock keysBlock = result.getBlock(0); + DoubleBlock ratesBlock = result.getBlock(1); + for (int i = 0; i < result.getPositionCount(); i++) { + var key = keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(); + rates.put(key, ratesBlock.getDouble(i)); 
+ } + result.releaseBlocks(); + } + return rates; + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsTimeseriesQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsTimeseriesQueryExec.java index 0d92a52e6053c..48cde0b8bd587 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsTimeseriesQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsTimeseriesQueryExec.java @@ -25,6 +25,7 @@ public class EsTimeseriesQueryExec extends EsQueryExec { static final EsField TSID_FIELD = new EsField("_tsid", DataTypes.KEYWORD, Map.of(), true); static final EsField TIMESTAMP_FIELD = new EsField("@timestamp", DataTypes.DATETIME, Map.of(), true); + static final EsField INTERVAL_FIELD = new EsField("timestamp_interval", DataTypes.DATETIME, Map.of(), true); public EsTimeseriesQueryExec(Source source, EsIndex index, QueryBuilder query) { this( @@ -33,7 +34,8 @@ public EsTimeseriesQueryExec(Source source, EsIndex index, QueryBuilder query) { List.of( new FieldAttribute(source, DOC_ID_FIELD.getName(), DOC_ID_FIELD), new FieldAttribute(source, TSID_FIELD.getName(), TSID_FIELD), - new FieldAttribute(source, TIMESTAMP_FIELD.getName(), TSID_FIELD) + new FieldAttribute(source, TIMESTAMP_FIELD.getName(), TIMESTAMP_FIELD), + new FieldAttribute(source, INTERVAL_FIELD.getName(), INTERVAL_FIELD) ), query, null, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 234e01ed11633..733bcfc366d85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -145,6 +146,7 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, limit, context.pageSize(rowEstimatedSize), context.queryPragmas().taskConcurrency(), + TimeValue.ZERO, shardContexts, querySupplier(esQueryExec.query()) ); From f9ae6db31920c5c9489a71d541283eaa18d9edcc Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 3 Apr 2024 16:23:36 +0200 Subject: [PATCH 091/264] ESQL: Add docs for the OPTIONS directive (#107013) This adds the docs for the newly added `OPTIONS` directive to `FROM`. 
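For a concrete sense of the syntax being documented, here is a minimal sketch that mirrors the option values exercised by the csv-spec test further down in this patch; the index name is just the test fixture's:

[source,esql]
----
FROM employees OPTIONS "allow_no_indices"="false","preference"="_shards:0"
| LIMIT 3
----

Both options map directly onto the equivalent search API parameters, as the new docs page explains.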
---
 .../esql/esql-index-options.asciidoc       | 52 +++++++++++++++++++
 docs/reference/esql/esql-language.asciidoc |  4 +-
 .../esql/source-commands/from.asciidoc     | 14 ++++-
 docs/reference/search/search.asciidoc      |  2 +
 .../src/main/resources/from.csv-spec       |  7 ++-
 5 files changed, 76 insertions(+), 3 deletions(-)
 create mode 100644 docs/reference/esql/esql-index-options.asciidoc

diff --git a/docs/reference/esql/esql-index-options.asciidoc b/docs/reference/esql/esql-index-options.asciidoc
new file mode 100644
index 0000000000000..ba2307f611d45
--- /dev/null
+++ b/docs/reference/esql/esql-index-options.asciidoc
@@ -0,0 +1,52 @@
+[[esql-index-options]]
+=== {esql} index options
+
+++++
+Index options
+++++
+
+The `OPTIONS` directive of the <> command allows configuring
+the way {esql} accesses the data to be queried. The argument passed to this
+directive is a comma-separated list of option name-value pairs, with the option
+name and the corresponding value double-quoted.
+
+[source,esql]
+----
+FROM index_pattern [OPTIONS "option1"="value1"[,...[,"optionN"="valueN"]]]
+----
+
+These options can only be provided as part of a <> command,
+and they apply to all the indices provided or matched by an index pattern.
+
+The option names and their values are the same as those used by the
+<>; note, however, that the default
+values may differ.
+
+The currently supported options are:
+
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
++
+Defaults to `true`.
+
+// unlike "allow-no-indices", "index-ignore-unavailable" includes a default
+// in common-parms.asciidoc, which is different from QL's -- we need to
+// provide the full text here.
+`ignore_unavailable`::
+(Optional, Boolean) If `false`, the request returns an error if it targets a
+missing or closed index.
++
+Defaults to `true`.
+
+include::{es-repo-dir}/search/search.asciidoc[tag=search-preference]
+
+*Examples*
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/from.csv-spec[tag=convertFromDatetimeWithOptions]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/from.csv-spec[tag=convertFromDatetimeWithOptions-result]
+|===
+
diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc
index 8ffc0af7cbeb2..e4c873457b21b 100644
--- a/docs/reference/esql/esql-language.asciidoc
+++ b/docs/reference/esql/esql-language.asciidoc
@@ -11,6 +11,7 @@ Detailed information about the {esql} language:
 * <>
 * <>
 * <>
+* <>
 * <>
 * <>
 * <>
@@ -19,6 +20,7 @@ include::esql-syntax.asciidoc[]
 include::esql-commands.asciidoc[]
 include::esql-functions-operators.asciidoc[]
 include::metadata-fields.asciidoc[]
+include::esql-index-options.asciidoc[]
 include::multivalued-fields.asciidoc[]
 include::esql-process-data-with-dissect-grok.asciidoc[]
-include::esql-enrich-data.asciidoc[]
\ No newline at end of file
+include::esql-enrich-data.asciidoc[]
diff --git a/docs/reference/esql/source-commands/from.asciidoc b/docs/reference/esql/source-commands/from.asciidoc
index d81c46530e089..427562a8c0dbb 100644
--- a/docs/reference/esql/source-commands/from.asciidoc
+++ b/docs/reference/esql/source-commands/from.asciidoc
@@ -6,7 +6,7 @@
 [source,esql]
 ----
-FROM index_pattern [METADATA fields]
+FROM index_pattern [METADATA fields] [OPTIONS options]
 ----
 *Parameters*
 `index_pattern`::
 A list of indices, data streams or aliases. Supports wildcards and date math.
 `fields`::
 A comma-separated list of <> to retrieve.
+`options`::
+A comma-separated list of <> to configure
+data access.
+ *Description* The `FROM` source command returns a table with data from a data stream, index, @@ -82,3 +86,11 @@ Use the optional `METADATA` directive to enable <>. +This directive must follow `METADATA`, if both are specified: + +[source,esql] +---- +FROM employees* METADATA _index OPTIONS "ignore_unavailable"="true" +---- diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index f602b6457c31e..3be30c98261d5 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -141,6 +141,7 @@ When unspecified, the pre-filter phase is executed if any of these conditions is - The primary sort of the query targets an indexed field. [[search-preference]] +tag::search-preference[] `preference`:: (Optional, string) Nodes and shards used for the search. By default, {es} selects from eligible @@ -177,6 +178,7 @@ Any string that does not start with `_`. If the cluster state and selected shards do not change, searches using the same `` value are routed to the same shards in the same order. ==== +end::search-preference[] [[search-api-query-params-q]] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index 76b97d11a7f85..11fb0ab532945 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -132,14 +132,19 @@ c:l | name:k convertFromDatetimeWithOptions required_feature: esql.from_options +// tag::convertFromDatetimeWithOptions[] FROM employees OPTIONS "allow_no_indices"="false","preference"="_shards:0" | SORT emp_no | EVAL hire_double = to_double(hire_date) | KEEP emp_no, hire_date, hire_double -| LIMIT 3; +| LIMIT 3 +// end::convertFromDatetimeWithOptions[] +; +// tag::convertFromDatetimeWithOptions-result[] emp_no:integer |hire_date:date |hire_double:double 10001 |1986-06-26T00:00:00.000Z|5.20128E11 10002 |1985-11-21T00:00:00.000Z|5.013792E11 10003 |1986-08-28T00:00:00.000Z|5.255712E11 +// end::convertFromDatetimeWithOptions-result[] ; From 6e3608992c1d8fc28b041fda5777532e7bad9251 Mon Sep 17 00:00:00 2001 From: Joe Reuter Date: Wed, 3 Apr 2024 17:04:24 +0200 Subject: [PATCH 092/264] Add non-indexed fields to ecs templates (#106714) * add non-indexed fields to ecs templates * update * Update docs/changelog/106714.yaml * Adjusting ECS tests to verify proper index and doc_values settings * review comment --------- Co-authored-by: eyalkoren <41850454+eyalkoren@users.noreply.github.com> --- docs/changelog/106714.yaml | 5 ++ .../src/main/resources/ecs@mappings.json | 24 ++++++ .../xpack/stack/EcsDynamicTemplatesIT.java | 78 ++++++++++++------- .../xpack/stack/StackTemplateRegistry.java | 2 +- 4 files changed, 81 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/106714.yaml diff --git a/docs/changelog/106714.yaml b/docs/changelog/106714.yaml new file mode 100644 index 0000000000000..65b0acd77d764 --- /dev/null +++ b/docs/changelog/106714.yaml @@ -0,0 +1,5 @@ +pr: 106714 +summary: Add non-indexed fields to ecs templates +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index 7eaf37ba1d95e..3eae6c1fa4f5a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json 
@@ -23,6 +23,30 @@ "unmatch_mapping_type": "object" } }, + { + "ecs_non_indexed_keyword": { + "mapping": { + "type": "keyword", + "index": false, + "doc_values": false + }, + "path_match": [ + "event.original" + ] + } + }, + { + "ecs_non_indexed_long": { + "mapping": { + "type": "long", + "index": false, + "doc_values": false + }, + "path_match": [ + "*.x509.public_key_exponent" + ] + } + }, { "ecs_ip": { "mapping": { diff --git a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java index 09e9a6090c485..8bdf7b30a9997 100644 --- a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java +++ b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java @@ -191,6 +191,11 @@ public void testNumericMessage() throws IOException { verifyEcsMappings(indexName); } + private void assertType(String expectedType, Map actualMappings) throws IOException { + assertNotNull("expected to get non-null mappings for field", actualMappings); + assertEquals(expectedType, actualMappings.get("type")); + } + public void testUsage() throws IOException { String indexName = "test-usage"; createTestIndex(indexName); @@ -205,13 +210,13 @@ public void testUsage() throws IOException { indexDocument(indexName, fieldsMap); final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, new HashMap<>(), ""); - assertEquals("scaled_float", flatFieldMappings.get("host.cpu.usage")); - assertEquals("scaled_float", flatFieldMappings.get("string.usage")); - assertEquals("long", flatFieldMappings.get("usage")); - assertEquals("long", flatFieldMappings.get("root.usage.long")); - assertEquals("float", flatFieldMappings.get("root.usage.float")); + assertType("scaled_float", flatFieldMappings.get("host.cpu.usage")); + assertType("scaled_float", flatFieldMappings.get("string.usage")); + assertType("long", flatFieldMappings.get("usage")); + assertType("long", flatFieldMappings.get("root.usage.long")); + assertType("float", flatFieldMappings.get("root.usage.float")); } public void testOnlyMatchLeafFields() throws IOException { @@ -230,16 +235,16 @@ public void testOnlyMatchLeafFields() throws IOException { indexDocument(indexName, fieldsMap); final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, new HashMap<>(), ""); - assertEquals("long", flatFieldMappings.get("foo.message.bar")); - assertEquals("long", flatFieldMappings.get("foo.url.path.bar")); - assertEquals("long", flatFieldMappings.get("foo.url.full.bar")); - assertEquals("long", flatFieldMappings.get("foo.stack_trace.bar")); - assertEquals("long", flatFieldMappings.get("foo.user_agent.original.bar")); - assertEquals("long", flatFieldMappings.get("foo.created.bar")); - assertEquals("float", flatFieldMappings.get("foo._score.bar")); - assertEquals("long", flatFieldMappings.get("foo.structured_data")); + assertType("long", flatFieldMappings.get("foo.message.bar")); + assertType("long", flatFieldMappings.get("foo.url.path.bar")); + assertType("long", flatFieldMappings.get("foo.url.full.bar")); + assertType("long", flatFieldMappings.get("foo.stack_trace.bar")); + 
assertType("long", flatFieldMappings.get("foo.user_agent.original.bar")); + assertType("long", flatFieldMappings.get("foo.created.bar")); + assertType("float", flatFieldMappings.get("foo._score.bar")); + assertType("long", flatFieldMappings.get("foo.structured_data")); } private static void indexDocument(String indexName, Map flattenedFieldsMap) throws IOException { @@ -364,28 +369,26 @@ private Map getMappings(String indexName) throws IOException { private void processRawMappingsSubtree( final Map fieldSubtrees, - final Map flatFieldMappings, - final Map flatMultiFieldsMappings, + final Map> flatFieldMappings, + final Map> flatMultiFieldsMappings, final String subtreePrefix ) { fieldSubtrees.forEach((fieldName, fieldMappings) -> { String fieldFullPath = subtreePrefix + fieldName; Map fieldMappingsMap = ((Map) fieldMappings); - String type = (String) fieldMappingsMap.get("type"); - if (type != null) { - flatFieldMappings.put(fieldFullPath, type); + if (fieldMappingsMap.get("type") != null) { + flatFieldMappings.put(fieldFullPath, fieldMappingsMap); } Map subfields = (Map) fieldMappingsMap.get("properties"); if (subfields != null) { processRawMappingsSubtree(subfields, flatFieldMappings, flatMultiFieldsMappings, fieldFullPath + "."); } - Map> fields = (Map>) fieldMappingsMap.get("fields"); + Map> fields = (Map>) fieldMappingsMap.get("fields"); if (fields != null) { fields.forEach((subFieldName, multiFieldMappings) -> { String subFieldFullPath = fieldFullPath + "." + subFieldName; - String subFieldType = Objects.requireNonNull(multiFieldMappings.get("type")); - flatMultiFieldsMappings.put(subFieldFullPath, subFieldType); + flatMultiFieldsMappings.put(subFieldFullPath, multiFieldMappings); }); } }); @@ -393,34 +396,44 @@ private void processRawMappingsSubtree( private void verifyEcsMappings(String indexName) throws IOException { final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); - final Map flatMultiFieldsMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); + final Map> flatMultiFieldsMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, flatMultiFieldsMappings, ""); Map> shallowFieldMapCopy = new HashMap<>(ecsFlatFieldDefinitions); logger.info("Testing mapping of {} ECS fields", shallowFieldMapCopy.size()); List nonEcsFields = new ArrayList<>(); Map fieldToWrongMappingType = new HashMap<>(); - flatFieldMappings.forEach((fieldName, actualMappingType) -> { + List wronglyIndexedFields = new ArrayList<>(); + List wronglyDocValuedFields = new ArrayList<>(); + flatFieldMappings.forEach((fieldName, actualMappings) -> { Map expectedMappings = shallowFieldMapCopy.remove(fieldName); if (expectedMappings == null) { nonEcsFields.add(fieldName); } else { String expectedType = (String) expectedMappings.get("type"); + String actualMappingType = (String) actualMappings.get("type"); if (actualMappingType.equals(expectedType) == false) { fieldToWrongMappingType.put(fieldName, actualMappingType); } + if (expectedMappings.get("index") != actualMappings.get("index")) { + wronglyIndexedFields.add(fieldName); + } + if (expectedMappings.get("doc_values") != actualMappings.get("doc_values")) { + wronglyDocValuedFields.add(fieldName); + } } }); Map shallowMultiFieldMapCopy = new HashMap<>(ecsFlatMultiFieldDefinitions); logger.info("Testing mapping of {} ECS multi-fields", shallowMultiFieldMapCopy.size()); - flatMultiFieldsMappings.forEach((fieldName, actualMappingType) -> { + 
flatMultiFieldsMappings.forEach((fieldName, actualMappings) -> { String expectedType = shallowMultiFieldMapCopy.remove(fieldName); if (expectedType != null) { // not finding an entry in the expected multi-field mappings map is acceptable: our dynamic templates are required to // ensure multi-field mapping for all fields with such ECS definitions. However, the patterns in these templates may lead // to multi-field mapping for ECS fields for which such are not defined + String actualMappingType = (String) actualMappings.get("type"); if (actualMappingType.equals(expectedType) == false) { fieldToWrongMappingType.put(fieldName, actualMappingType); } @@ -457,6 +470,8 @@ private void verifyEcsMappings(String indexName) throws IOException { ); }); nonEcsFields.forEach(field -> logger.error("The test document contains '{}', which is not an ECS field", field)); + wronglyIndexedFields.forEach(fieldName -> logger.error("ECS field '{}' should be mapped with \"index: false\"", fieldName)); + wronglyDocValuedFields.forEach(fieldName -> logger.error("ECS field '{}' should be mapped with \"doc_values: false\"", fieldName)); assertTrue("ECS is not fully covered by the current ECS dynamic templates, see details above", shallowFieldMapCopy.isEmpty()); assertTrue( @@ -468,5 +483,14 @@ private void verifyEcsMappings(String indexName) throws IOException { fieldToWrongMappingType.isEmpty() ); assertTrue("The test document contains non-ECS fields, see details above", nonEcsFields.isEmpty()); + assertTrue( + "At least one field was not mapped with \"index: false\" as it should according to its ECS definitions, see details above", + wronglyIndexedFields.isEmpty() + ); + assertTrue( + "At least one field was not mapped with \"doc_values: false\" as it should according to its ECS definitions, see " + + "details above", + wronglyDocValuedFields.isEmpty() + ); } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index b21e8c0c15811..3930cfe6cd941 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. 
- public static final int REGISTRY_VERSION = 8; + public static final int REGISTRY_VERSION = 9; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( From 755226df23066b0bcd70a06a6ae6e6f340daddd6 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 3 Apr 2024 18:09:17 +0300 Subject: [PATCH 093/264] [TEST] Add full cluster restart test for downsampling (#107053) This is a copy of the test added for rolling upgrade in #107036 --- .../FullClusterRestartDownsampleIT.java | 284 ++++++++++++++++++ 1 file changed, 284 insertions(+) create mode 100644 qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java new file mode 100644 index 0000000000000..b171c6e6f0358 --- /dev/null +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -0,0 +1,284 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; + +public class FullClusterRestartDownsampleIT extends ParameterizedFullClusterRestartTestCase { + + private static final String FIXED_INTERVAL = "1h"; + private String index; + private String policy; + private String dataStream; + + private static TemporaryFolder repoDirectory = new TemporaryFolder(); + + protected static LocalClusterConfigProvider clusterConfig = c -> {}; + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(2) + .setting("xpack.security.enabled", "false") + .setting("indices.lifecycle.poll_interval", "5s") + .apply(() -> clusterConfig) + .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + public FullClusterRestartDownsampleIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { + super(upgradeStatus); + } + + @Override 
+ protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + private static final String POLICY = """ + { + "policy": { + "phases": { + "hot": { + "actions": { + "rollover" : { + "max_age": "30s" + }, + "downsample": { + "fixed_interval": "$interval" + } + } + } + } + } + } + """; + + private static final String TEMPLATE = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "time_series": { + "start_time": "2010-01-01T00:00:00.000Z", + "end_time": "2022-01-01T00:00:00.000Z" + }, + "routing_path": ["metricset"], + "mode": "time_series", + "look_ahead_time": "1m", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String TEMPLATE_NO_TIME_BOUNDARIES = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "routing_path": ["metricset"], + "mode": "time_series", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String BULK = """ + {"create": {}} + {"@timestamp": "2020-01-01T05:10:00Z", "metricset": "pod", "volume" : 10} + {"create": {}} + {"@timestamp": "2020-01-01T05:20:00Z", "metricset": "pod", "volume" : 20} + {"create": {}} + {"@timestamp": "2020-01-01T05:30:00Z", "metricset": "pod", "volume" : 30} + {"create": {}} + {"@timestamp": "2020-01-01T05:40:00Z", "metricset": "pod", "volume" : 40} + {"create": {}} + {"@timestamp": "2020-01-01T06:10:00Z", "metricset": "pod", "volume" : 50} + {"create": {}} + {"@timestamp": "2020-01-01T07:10:00Z", "metricset": "pod", "volume" : 60} + {"create": {}} + {"@timestamp": "2020-01-01T09:10:00Z", "metricset": "pod", "volume" : 70} + {"create": {}} + {"@timestamp": "2020-01-01T09:20:00Z", "metricset": "pod", "volume" : 80} + """; + + @Before + public void refreshAbstractions() { + policy = "policy-" + randomAlphaOfLength(5); + dataStream = "ds-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + index = ".ds-" + dataStream; + logger.info("--> running [{}] with index [{}], data stream [{}], and policy [{}]", getTestName(), index, dataStream, policy); + } + + private void createIndex() throws IOException { + var putIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE, dataStream, policy)); + assertOK(client().performRequest(putIndexTemplateRequest)); + } + + private void bulk() throws IOException { + var bulkRequest = new Request("POST", "/" + dataStream + "/_bulk"); + bulkRequest.setJsonEntity(BULK); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + + private void createIlmPolicy() throws IOException { + Request request = new Request("PUT", "_ilm/policy/" + policy); + request.setJsonEntity(POLICY.replace("$interval", FIXED_INTERVAL)); + 
client().performRequest(request);
+    }
+
+    private void startDownsampling() throws Exception {
+        // Update template to not contain time boundaries anymore (rollover is blocked otherwise due to index time
+        // boundaries overlapping after rollover)
+        Request updateIndexTemplateRequest = new Request("POST", "/_index_template/1");
+        updateIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE_NO_TIME_BOUNDARIES, dataStream, policy));
+        assertOK(client().performRequest(updateIndexTemplateRequest));
+
+        // Manually roll over the original index so that it is no longer the write index in the data stream
+        Request rolloverRequest = new Request("POST", "/" + dataStream + "/_rollover");
+        rolloverRequest.setJsonEntity("""
+            {
+              "conditions": {
+                "max_docs": "1"
+              }
+            }""");
+        client().performRequest(rolloverRequest);
+        logger.info("rollover complete");
+    }
+
+    private void runQuery() throws Exception {
+        String rollup = waitAndGetRollupIndexName();
+        assertFalse(rollup.isEmpty());
+
+        // Retry until the downsample index is populated.
+        assertBusy(() -> {
+            Request request = new Request("POST", "/" + dataStream + "/_search");
+            var map = entityAsMap(client().performRequest(request));
+            var hits = (List) ((Map) map.get("hits")).get("hits");
+            assertEquals(4, hits.size());
+            for (var hit : hits) {
+                assertEquals(rollup, ((Map) hit).get("_index"));
+            }
+        }, 30, TimeUnit.SECONDS);
+    }
+
+    private String waitAndGetRollupIndexName() throws InterruptedException, IOException {
+        final String[] rollupIndexName = new String[1];
+        waitUntil(() -> {
+            try {
+                rollupIndexName[0] = getRollupIndexName();
+                return rollupIndexName[0] != null;
+            } catch (IOException e) {
+                return false;
+            }
+        }, 120, TimeUnit.SECONDS);
+        if (rollupIndexName[0] == null) {
+            logger.warn("--> rollup index name is NULL");
+        } else {
+            logger.info("--> original index name is [{}], rollup index name is [{}]", index, rollupIndexName[0]);
+        }
+        return rollupIndexName[0];
+    }
+
+    private String getRollupIndexName() throws IOException {
+        String endpoint = "/downsample-" + FIXED_INTERVAL + "-" + index + "-*/?expand_wildcards=all";
+        Response response = client().performRequest(new Request("GET", endpoint));
+        Map asMap = responseAsMap(response);
+        if (asMap.size() == 1) {
+            return (String) asMap.keySet().toArray()[0];
+        }
+        logger.warn("--> No matching rollup name for path [{}]", endpoint);
+        return null;
+    }
+
+    public void testRollupIndex() throws Exception {
+        assumeTrue(
+            "Downsample got many stability improvements in 8.10.0",
+            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE)
+        );
+        if (isRunningAgainstOldCluster()) {
+            createIlmPolicy();
+            createIndex();
+            bulk();
+            startDownsampling();
+        } else {
+            runQuery();
+        }
+    }
+}

From 36bcb6b3989a0bc54c8a0d4a68c633c977dd092c Mon Sep 17 00:00:00 2001
From: Albert Zaharovits
Date: Wed, 3 Apr 2024 18:33:14 +0300
Subject: [PATCH 094/264] Query API Keys support for both `aggs` and `aggregations` keywords (#107054)

The Query API Key Information endpoint supports aggs since #104895.
But some language clients actually use the `aggregations` keyword in requests,
as the preferred synonym to `aggs`. This PR adds support for the
`aggregations` request keyword as a synonym for the existing `aggs` term.
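As an illustration, a request of the following shape should now parse with either keyword. This sketch assumes the `terms` aggregation on the `username` field, which the docs below list as supported; the aggregation name is a placeholder:

[source,console]
----
POST /_security/_query/api_key
{
  "aggregations": {
    "keys_by_username": {
      "terms": { "field": "username" }
    }
  }
}
----

Supplying both `aggs` and `aggregations` in one request is rejected as a duplicate, as the parser change below shows.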
Closes #106839 --- docs/changelog/107054.yaml | 6 ++ .../rest-api/security/query-api-key.asciidoc | 2 +- .../xpack/security/ApiKeyAggsIT.java | 8 +-- .../action/apikey/RestQueryApiKeyAction.java | 28 +++++--- .../apikey/RestQueryApiKeyActionTests.java | 66 +++++++++++++++++++ 5 files changed, 96 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/107054.yaml diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml new file mode 100644 index 0000000000000..6511cb5185492 --- /dev/null +++ b/docs/changelog/107054.yaml @@ -0,0 +1,6 @@ +pr: 107054 +summary: Query API Keys support for both `aggs` and `aggregations` keywords +area: Security +type: enhancement +issues: + - 106839 diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index 1888a110e072f..ad4184ec34a29 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -232,7 +232,7 @@ simply mentioning `metadata` (not followed by any dot and sub-field name). NOTE: You cannot query the role descriptors of an API key. ==== -`aggs`:: +`aggs` or `aggregations`:: (Optional, object) Any <> to run over the corpus of returned API keys. Aggregations and queries work together. Aggregations are computed only on the API keys that match the query. This supports only a subset of aggregation types, namely: <>, diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java index 427d918fd64d5..f9d5c42affcf0 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java @@ -98,7 +98,7 @@ public void testFiltersAggs() throws IOException { // other bucket assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { - "aggs": { + "aggregations": { "only_user_keys": { "filters": { "other_bucket_key": "other_user_keys", @@ -267,7 +267,7 @@ public void testFiltersAggs() throws IOException { "good-api-key-invalidated": { "term": {"invalidated": false}} } }, - "aggs": { + "aggregations": { "wrong-field": { "filters": { "filters": { @@ -487,7 +487,7 @@ public void testFilterAggs() throws IOException { { "usernames": { "terms": { "field": "username" } } } ] }, - "aggs": { + "aggregations": { "not_expired": { "filter": { "range": { @@ -564,7 +564,7 @@ public void testDisallowedAggTypes() { ); request.setJsonEntity(""" { - "aggs": { + "aggregations": { "all_.security_docs": { "global": {}, "aggs": { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java index 77c2a080dbb57..59992e42d88d5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java @@ -36,6 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.search.aggregations.AggregatorFactories.parseAggregators; 
+import static org.elasticsearch.search.builder.SearchSourceBuilder.AGGREGATIONS_FIELD; +import static org.elasticsearch.search.builder.SearchSourceBuilder.AGGS_FIELD; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** @@ -47,19 +49,27 @@ public final class RestQueryApiKeyAction extends ApiKeyBaseRestHandler { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "query_api_key_request_payload", - a -> new Payload( - (QueryBuilder) a[0], - (AggregatorFactories.Builder) a[1], - (Integer) a[2], - (Integer) a[3], - (List) a[4], - (SearchAfterBuilder) a[5] - ) + a -> { + if (a[1] != null && a[2] != null) { + throw new IllegalArgumentException("Duplicate 'aggs' or 'aggregations' field"); + } else { + return new Payload( + (QueryBuilder) a[0], + (AggregatorFactories.Builder) (a[1] != null ? a[1] : a[2]), + (Integer) a[3], + (Integer) a[4], + (List) a[5], + (SearchAfterBuilder) a[6] + ); + } + } ); static { PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseTopLevelQuery(p), new ParseField("query")); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), new ParseField("aggs")); + // only one of aggs or aggregations is allowed + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), AGGREGATIONS_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), AGGS_FIELD); PARSER.declareInt(optionalConstructorArg(), new ParseField("from")); PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 74d1203fd52ec..2240b72c1a963 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -35,6 +36,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; @@ -48,6 +50,7 @@ import java.util.Map; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -145,6 +148,69 @@ public void doE assertNotNull(responseSetOnce.get()); } + public void testAggsAndAggregationsTogether() { + String agg1; + String agg2; + if (randomBoolean()) { + agg1 = "aggs"; + agg2 = "aggregations"; + } else { + agg1 = "aggregations"; + agg2 = "aggs"; + } + final String 
requestBody = Strings.format(""" + { + "%s": { + "all_keys_by_type": { + "composite": { + "sources": [ + { "type": { "terms": { "field": "type" } } } + ] + } + } + }, + "%s": { + "type_cardinality": { + "cardinality": { + "field": "type" + } + } + } + }""", agg1, agg2); + + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(requestBody), + XContentType.JSON + ).build(); + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + fail("TEST failed, request parsing should've failed"); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); + } + }; + RestQueryApiKeyAction restQueryApiKeyAction = new RestQueryApiKeyAction(Settings.EMPTY, mockLicenseState); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> restQueryApiKeyAction.handleRequest(restRequest, restChannel, client) + ); + assertThat(ex.getCause().getMessage(), containsString("Duplicate 'aggs' or 'aggregations' field")); + assertThat(ex.getMessage(), containsString("Failed to build [query_api_key_request_payload]")); + assertNull(responseSetOnce.get()); + } + public void testParsingSearchParameters() throws Exception { final String requestBody = """ { From 245d69d9e6cc1340fdd4544ab909d06d4d11ddc6 Mon Sep 17 00:00:00 2001 From: Ash <1849116+ashokaditya@users.noreply.github.com> Date: Wed, 3 Apr 2024 17:53:38 +0200 Subject: [PATCH 095/264] [Security Solution] Add `read` permission for third party agent indices for `kibana_system` (#107046) * add read permission for third party agent indices Allow `read` privilege for `kibana_system` role on `logs-sentinel_one*` and `logs-crowdstrike*` index patterns closes elastic/security-team/issues/9046 * Update docs/changelog/107046.yaml * review changes --- docs/changelog/107046.yaml | 6 ++++ .../KibanaOwnedReservedRoleDescriptors.java | 6 ++++ .../authz/store/ReservedRolesStoreTests.java | 31 +++++++++++++++++++ 3 files changed, 43 insertions(+) create mode 100644 docs/changelog/107046.yaml diff --git a/docs/changelog/107046.yaml b/docs/changelog/107046.yaml new file mode 100644 index 0000000000000..6c1373e09d17c --- /dev/null +++ b/docs/changelog/107046.yaml @@ -0,0 +1,6 @@ +pr: 107046 +summary: "[Security Solution] Add `read` permission for third party agent indices\ + \ for `kibana_system`" +area: Authorization +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 3c76734b794d8..cdb7f44d41e4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -272,6 +272,12 @@ static RoleDescriptor kibanaSystem(String name) { .indices(".logs-osquery_manager.actions-*") .privileges("auto_configure", "create_index", "read", "index", "write", "delete") .build(), + 
+ // Third party agents (that use non-Elastic Defend integrations) info logs indices.
+ // Kibana reads from these to display agent status/info to the user.
+ // These are indices that filebeat writes to, and the data in these indices is ingested by Fleet integrations
+ // in order to provide support for response actions related to malicious events for such agents.
+ RoleDescriptor.IndicesPrivileges.builder().indices("logs-sentinel_one.*", "logs-crowdstrike.*").privileges("read").build(),
 // For ILM policy for APM, Endpoint, & Synthetics packages that have delete action
 RoleDescriptor.IndicesPrivileges.builder()
     .indices(
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index b0d25949947e3..39a94e4a2f0bf 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -993,6 +993,37 @@ public void testKibanaSystemRole() {
         assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true));
     });

+        // Tests for third-party agent indices to which `kibana_system` has only `read` access
+        Arrays.asList(
+            "logs-sentinel_one." + randomAlphaOfLength(randomIntBetween(0, 13)),
+            "logs-crowdstrike." + randomAlphaOfLength(randomIntBetween(0, 13))
+        ).forEach((index) -> {
+            final IndexAbstraction indexAbstraction = mockIndexAbstraction(index);
+            assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false));
+            assertThat(
+                kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction),
+                is(false)
+            );
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
+            assertThat(
+                kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction),
+                is(false)
+            );
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false));
+            assertThat(
+                kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction),
+                is(true)
+            );
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true));
+        });
+
     // Index for Endpoint specific actions
     Arrays.asList(".logs-endpoint.actions-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> {
         final
IndexAbstraction indexAbstraction = mockIndexAbstraction(index); From 2f33b012ba508c6c46137f69bcc102f1b08adced Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 3 Apr 2024 12:08:48 -0400 Subject: [PATCH 096/264] Split the mappings for failure stores out of the index template service. (#107025) This will help us keep failure store specific configurations in one place, and hopefully make it easier to evolve the schema in the future. --- .../DataStreamFailureStoreDefinition.java | 134 ++++++++++++++++++ .../MetadataIndexTemplateService.java | 111 +-------------- 2 files changed, 138 insertions(+), 107 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java new file mode 100644 index 0000000000000..f1fc107df5f62 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.RoutingFieldMapper; + +import java.io.IOException; + +/** + * A utility class that contains the mappings and settings logic for failure store indices that are a part of data streams. + */ +public class DataStreamFailureStoreDefinition { + + public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; + + static { + try { + /* + * The data stream failure store mapping. 
The JSON content is as follows: + * { + * "_doc": { + * "dynamic": false, + * "_routing": { + * "required": false + * }, + * "properties": { + * "@timestamp": { + * "type": "date", + * "ignore_malformed": false + * }, + * "document": { + * "properties": { + * "id": { + * "type": "keyword" + * }, + * "routing": { + * "type": "keyword" + * }, + * "index": { + * "type": "keyword" + * } + * } + * }, + * "error": { + * "properties": { + * "message": { + * "type": "wildcard" + * }, + * "stack_trace": { + * "type": "text" + * }, + * "type": { + * "type": "keyword" + * }, + * "pipeline": { + * "type": "keyword" + * }, + * "pipeline_trace": { + * "type": "keyword" + * }, + * "processor": { + * "type": "keyword" + * } + * } + * } + * } + * } + * } + */ + DATA_STREAM_FAILURE_STORE_MAPPING = new CompressedXContent( + (builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) + .field("dynamic", false) + .startObject(RoutingFieldMapper.NAME) + .field("required", false) + .endObject() + .startObject("properties") + .startObject(MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD) + .field("type", DateFieldMapper.CONTENT_TYPE) + .field("ignore_malformed", false) + .endObject() + .startObject("document") + .startObject("properties") + // document.source is unmapped so that it can be persisted in source only without worrying that the document might cause + // a mapping error + .startObject("id") + .field("type", "keyword") + .endObject() + .startObject("routing") + .field("type", "keyword") + .endObject() + .startObject("index") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .startObject("error") + .startObject("properties") + .startObject("message") + .field("type", "wildcard") + .endObject() + .startObject("stack_trace") + .field("type", "text") + .endObject() + .startObject("type") + .field("type", "keyword") + .endObject() + .startObject("pipeline") + .field("type", "keyword") + .endObject() + .startObject("pipeline_trace") + .field("type", "keyword") + .endObject() + .startObject("processor") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 1e2e15a6300c7..0daa12b7ed71f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -92,8 +92,6 @@ public class MetadataIndexTemplateService { private static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITH_ROUTING; - private static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; - static { final Map> defaultTimestampField = Map.of( DEFAULT_TIMESTAMP_FIELD, @@ -122,110 +120,6 @@ public class MetadataIndexTemplateService { .map(defaultTimestampField) .endObject() ); - /* - * The data stream failure store mapping. 
The JSON content is as follows: - * { - * "_doc": { - * "dynamic": false, - * "_routing": { - * "required": false - * }, - * "properties": { - * "@timestamp": { - * "type": "date", - * "ignore_malformed": false - * }, - * "document": { - * "properties": { - * "id": { - * "type": "keyword" - * }, - * "routing": { - * "type": "keyword" - * }, - * "index": { - * "type": "keyword" - * } - * } - * }, - * "error": { - * "properties": { - * "message": { - * "type": "wildcard" - * }, - * "stack_trace": { - * "type": "text" - * }, - * "type": { - * "type": "keyword" - * }, - * "pipeline": { - * "type": "keyword" - * }, - * "pipeline_trace": { - * "type": "keyword" - * }, - * "processor": { - * "type": "keyword" - * } - * } - * } - * } - * } - * } - */ - DATA_STREAM_FAILURE_STORE_MAPPING = new CompressedXContent( - (builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) - .field("dynamic", false) - .startObject(RoutingFieldMapper.NAME) - .field("required", false) - .endObject() - .startObject("properties") - .startObject(DEFAULT_TIMESTAMP_FIELD) - .field("type", DateFieldMapper.CONTENT_TYPE) - .field("ignore_malformed", false) - .endObject() - .startObject("document") - .startObject("properties") - // document.source is unmapped so that it can be persisted in source only without worrying that the document might cause - // a mapping error - .startObject("id") - .field("type", "keyword") - .endObject() - .startObject("routing") - .field("type", "keyword") - .endObject() - .startObject("index") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .startObject("error") - .startObject("properties") - .startObject("message") - .field("type", "wildcard") - .endObject() - .startObject("stack_trace") - .field("type", "text") - .endObject() - .startObject("type") - .field("type", "keyword") - .endObject() - .startObject("pipeline") - .field("type", "keyword") - .endObject() - .startObject("pipeline_trace") - .field("type", "keyword") - .endObject() - .startObject("processor") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); - } catch (IOException e) { throw new AssertionError(e); } @@ -1446,7 +1340,10 @@ public static List collectMappings( Objects.requireNonNull(template, "Composable index template must be provided"); // Check if this is a failure store index, and if it is, discard any template mappings. Failure store mappings are predefined. 
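Both copies of the mapping above are built with the CompressedXContent(ToXContent) constructor, which appears to run the builder lambda once at construction time and keep the serialized mapping in compressed form. A minimal, self-contained sketch of that same pattern, using a made-up "notes" field rather than the failure store schema:

    import org.elasticsearch.common.compress.CompressedXContent;
    import java.io.IOException;

    class MappingSketch {
        static final CompressedXContent EXAMPLE_MAPPING;
        static {
            try {
                EXAMPLE_MAPPING = new CompressedXContent(
                    (builder, params) -> builder.startObject("_doc")
                        .field("dynamic", false)
                        .startObject("properties")
                        .startObject("notes") // illustrative field, not part of the patch
                        .field("type", "keyword")
                        .endObject()
                        .endObject()
                        .endObject()
                );
            } catch (IOException e) {
                throw new AssertionError(e); // same idiom the patch uses around this constructor
            }
        }
    }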
if (template.getDataStreamTemplate() != null && indexName.startsWith(DataStream.FAILURE_STORE_PREFIX)) { - return List.of(DATA_STREAM_FAILURE_STORE_MAPPING, ComposableIndexTemplate.DataStreamTemplate.DATA_STREAM_MAPPING_SNIPPET); + return List.of( + DataStreamFailureStoreDefinition.DATA_STREAM_FAILURE_STORE_MAPPING, + ComposableIndexTemplate.DataStreamTemplate.DATA_STREAM_MAPPING_SNIPPET + ); } List mappings = template.composedOf() .stream() From d88836bb04f9c3e221b7d8b4246a45220a4b61a0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 3 Apr 2024 17:19:23 +0100 Subject: [PATCH 097/264] [ML] Add missing TokenizationConfigUpdate named writable to registry (#107056) --- .../MlInferenceNamedXContentProvider.java | 4 ++++ .../action/InferModelActionRequestTests.java | 22 +++++++++++++------ ...erTrainedModelDeploymentRequestsTests.java | 11 ++-------- .../TokenizationConfigUpdateTests.java | 11 +++++++--- 4 files changed, 29 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index 9bcc443f6d7b0..a3fb956c3252d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -74,6 +74,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelLocation; @@ -759,6 +760,9 @@ public List getNamedWriteables() { namedWriteables.add( new NamedWriteableRegistry.Entry(InferenceConfigUpdate.class, TextSimilarityConfigUpdate.NAME, TextSimilarityConfigUpdate::new) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceConfigUpdate.class, TokenizationConfigUpdate.NAME, TokenizationConfigUpdate::new) + ); // Location namedWriteables.add( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index 2934d1dc9c42f..983e5d43a946d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -35,6 +35,9 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdateTests; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdateTests; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdateTests; 
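The one-entry fix above matters because NamedWriteableRegistry resolves a (category class, name) pair to a reader when a stream is deserialized; if a TokenizationConfigUpdate arrives on the wire without this entry, readNamedWriteable fails with an IllegalArgumentException for the unknown name. A standalone sketch of the registration, with the surrounding wiring assumed:

    NamedWriteableRegistry registry = new NamedWriteableRegistry(
        List.of(
            new NamedWriteableRegistry.Entry(
                InferenceConfigUpdate.class,   // category class read from the stream
                TokenizationConfigUpdate.NAME, // name written by writeTo
                TokenizationConfigUpdate::new  // reader invoked on deserialization
            )
        )
    );
    // A StreamInput wrapped with this registry can now resolve the update via
    // in.readNamedWriteable(InferenceConfigUpdate.class); without the entry it throws.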
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdateTests; @@ -132,17 +135,20 @@ protected Request mutateInstance(Request instance) { public static InferenceConfigUpdate randomInferenceConfigUpdate() { return randomFrom( - RegressionConfigUpdateTests.randomRegressionConfigUpdate(), ClassificationConfigUpdateTests.randomClassificationConfigUpdate(), - ResultsFieldUpdateTests.randomUpdate(), - TextClassificationConfigUpdateTests.randomUpdate(), - TextEmbeddingConfigUpdateTests.randomUpdate(), - NerConfigUpdateTests.randomUpdate(), + EmptyConfigUpdateTests.testInstance(), FillMaskConfigUpdateTests.randomUpdate(), - ZeroShotClassificationConfigUpdateTests.randomUpdate(), + NerConfigUpdateTests.randomUpdate(), PassThroughConfigUpdateTests.randomUpdate(), QuestionAnsweringConfigUpdateTests.randomUpdate(), - EmptyConfigUpdateTests.testInstance() + RegressionConfigUpdateTests.randomRegressionConfigUpdate(), + ResultsFieldUpdateTests.randomUpdate(), + TextClassificationConfigUpdateTests.randomUpdate(), + TextEmbeddingConfigUpdateTests.randomUpdate(), + TextExpansionConfigUpdateTests.randomUpdate(), + TextSimilarityConfigUpdateTests.randomUpdate(), + TokenizationConfigUpdateTests.randomUpdate(), + ZeroShotClassificationConfigUpdateTests.randomUpdate() ); } @@ -165,6 +171,8 @@ public static InferenceConfigUpdate mutateInferenceConfigUpdate(InferenceConfigU adjustedUpdate = QuestionAnsweringConfigUpdateTests.mutateForVersion(update, version); } else if (nlpConfigUpdate instanceof TextExpansionConfigUpdate update) { adjustedUpdate = TextExpansionConfigUpdateTests.mutateForVersion(update, version); + } else if (nlpConfigUpdate instanceof TextSimilarityConfigUpdate update) { + adjustedUpdate = TextSimilarityConfigUpdateTests.mutateForVersion(update, version); } else { throw new IllegalArgumentException("Unknown update [" + currentUpdate.getName() + "]"); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java index e130951da662f..81cad93182ba7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java @@ -14,9 +14,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdateTests; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdateTests; import java.util.ArrayList; import java.util.Arrays; @@ -25,10 +22,6 @@ public class InferTrainedModelDeploymentRequestsTests extends AbstractWireSerializingTestCase { - private static InferenceConfigUpdate randomInferenceConfigUpdate() { - return randomFrom(ZeroShotClassificationConfigUpdateTests.createRandom(), EmptyConfigUpdateTests.testInstance()); - } - @Override protected Writeable.Reader instanceReader() { return InferTrainedModelDeploymentAction.Request::new; @@ -42,7 
+35,7 @@ protected InferTrainedModelDeploymentAction.Request createTestInstance() { if (createQueryStringRequest) { request = InferTrainedModelDeploymentAction.Request.forTextInput( randomAlphaOfLength(4), - randomBoolean() ? null : randomInferenceConfigUpdate(), + randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(), Arrays.asList(generateRandomStringArray(4, 7, false)), randomBoolean() ? null : TimeValue.parseTimeValue(randomTimeValue(), "timeout") ); @@ -54,7 +47,7 @@ protected InferTrainedModelDeploymentAction.Request createTestInstance() { request = InferTrainedModelDeploymentAction.Request.forDocs( randomAlphaOfLength(4), - randomBoolean() ? null : randomInferenceConfigUpdate(), + randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(), docs, randomBoolean() ? null : TimeValue.parseTimeValue(randomTimeValue(), "timeout") ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java index 90b5c60a01b62..4e2dce16aac6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java @@ -13,6 +13,13 @@ import java.io.IOException; public class TokenizationConfigUpdateTests extends AbstractWireSerializingTestCase { + + public static TokenizationConfigUpdate randomUpdate() { + Integer maxSequenceLength = randomBoolean() ? null : randomIntBetween(32, 64); + int span = randomIntBetween(8, 16); + return new TokenizationConfigUpdate(maxSequenceLength, span); + } + @Override protected Writeable.Reader instanceReader() { return TokenizationConfigUpdate::new; @@ -20,9 +27,7 @@ protected Writeable.Reader instanceReader() { @Override protected TokenizationConfigUpdate createTestInstance() { - Integer maxSequenceLength = randomBoolean() ? 
null : randomIntBetween(32, 64); - int span = randomIntBetween(8, 16); - return new TokenizationConfigUpdate(maxSequenceLength, span); + return randomUpdate(); } @Override From 42df8fef50eb3c5895fe09aad9b8be7e14332076 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 3 Apr 2024 19:02:24 +0200 Subject: [PATCH 098/264] [Inference API] Make completion task available in inference processor (#107060) --- .../results/ChatCompletionResults.java | 33 +++++++++++++++---- .../results/ChatCompletionResultsTests.java | 14 ++++++++ 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java index 50ca46d85190f..bbd4d026f0d55 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -7,14 +7,12 @@ package org.elasticsearch.xpack.core.inference.results; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -69,7 +67,7 @@ public String getWriteableName() { @Override public List transformToCoordinationFormat() { - throw new UnsupportedOperationException(); + return results; } @Override @@ -89,7 +87,7 @@ public Map asMap() { return map; } - public record Result(String content) implements Writeable, ToXContentObject { + public record Result(String content) implements InferenceResults, Writeable { public static final String RESULT = "result"; @@ -112,13 +110,34 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public String toString() { - return Strings.toString(this); + public String getResultsField() { + return RESULT; } + @Override public Map asMap() { - return Map.of(RESULT, content); + Map map = new LinkedHashMap<>(); + map.put(RESULT, content); + return map; } + + @Override + public Map asMap(String outputField) { + Map map = new LinkedHashMap<>(); + map.put(outputField, content); + return map; + } + + @Override + public Object predictedValue() { + return content; + } + + @Override + public String getWriteableName() { + return NAME; + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java index 444f6792abe63..6bbe6eea5394f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java @@ -76,6 +76,20 @@ public void testToXContent_CreatesTheRightFormatForMultipleCompletionResults() { }""")); } + public void testTransformToCoordinationFormat() { + String resultOneContent = "content 1"; + String resultTwoContent = "content 2"; + + var entity = new 
ChatCompletionResults( + List.of(new ChatCompletionResults.Result(resultOneContent), new ChatCompletionResults.Result(resultTwoContent)) + ); + + var transformedEntity = entity.transformToCoordinationFormat(); + + assertThat(transformedEntity.get(0).asMap(), is(Map.of(ChatCompletionResults.Result.RESULT, resultOneContent))); + assertThat(transformedEntity.get(1).asMap(), is(Map.of(ChatCompletionResults.Result.RESULT, resultTwoContent))); + } + @Override protected Writeable.Reader instanceReader() { return ChatCompletionResults::new; From 8cab439a8a3396c486fda095aea98d01da2baa31 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 3 Apr 2024 11:15:48 -0600 Subject: [PATCH 099/264] Mark enrich stats API as internal instead of public (#107022) This was previously publicly available; however, it exposes node counts and IDs. After discussion we've decided it's unnecessary on Serverless. Co-authored-by: Elastic Machine --- .../xpack/enrich/rest/RestEnrichStatsAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java index 9e3848e878ad2..e666319b563ea 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java @@ -19,7 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; -@ServerlessScope(Scope.PUBLIC) +@ServerlessScope(Scope.INTERNAL) public class RestEnrichStatsAction extends BaseRestHandler { @Override From 89563c238a2d4124b110044d5abd5daee191a741 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 3 Apr 2024 20:27:13 +0300 Subject: [PATCH 100/264] Fix auto sharding recommending 0 shards for 0.0 workload (#107064) --- .../autosharding/DataStreamAutoShardingService.java | 7 +++++-- .../autosharding/DataStreamAutoShardingServiceTests.java | 6 ++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index a045c73cc83a1..a26be73cc169d 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -338,8 +338,11 @@ private AutoShardingResult getDecreaseShardsResult( // Visible for testing static long computeOptimalNumberOfShards(int minNumberWriteThreads, int maxNumberWriteThreads, double indexingLoad) { return Math.max( - Math.min(roundUp(indexingLoad / (minNumberWriteThreads / 2.0)), 3), - roundUp(indexingLoad / (maxNumberWriteThreads / 2.0)) + Math.max( + Math.min(roundUp(indexingLoad / (minNumberWriteThreads / 2.0)), 3), + roundUp(indexingLoad / (maxNumberWriteThreads / 2.0)) + ), + 1 // we don't want to go lower than 1 shard ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 7f50ebca36fc5..41a5d0b70ea10 100644 --- 
a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -514,6 +514,12 @@ public void testCalculateDecreaseShardingRecommendations() { public void testComputeOptimalNumberOfShards() { int minWriteThreads = 2; int maxWriteThreads = 32; + + { + // 0.0 indexing load recommends 1 shard + logger.info("-> indexingLoad {}", 0.0); + assertThat(DataStreamAutoShardingService.computeOptimalNumberOfShards(minWriteThreads, maxWriteThreads, 0.0), is(1L)); + } { // the small values will be very common so let's randomise to make sure we never go below 1L double indexingLoad = randomDoubleBetween(0.0001, 1.0, true); From f49ead7446e722c5cf6d6268e3a043dd1f818fce Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:48:22 -0400 Subject: [PATCH 101/264] [ML] Add Cohere rerank to _inference service (#106378) --- docs/changelog/106378.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 23 ++- .../org/elasticsearch/inference/TaskType.java | 1 + .../org/elasticsearch/test/ESTestCase.java | 4 + .../inference/action/InferenceAction.java | 50 ++++- .../inference/results/RankedDocsResults.java | 133 ++++++++++++ .../action/InferenceActionRequestTests.java | 48 ++++- .../inference/InferenceBaseRestTest.java | 16 +- .../xpack/inference/RerankingIT.java | 119 +++++++++++ .../TestDenseInferenceServiceExtension.java | 3 + .../TestSparseInferenceServiceExtension.java | 3 + .../action/TransportInferenceAction.java | 1 + .../TransportPutInferenceModelAction.java | 7 +- .../external/action/ExecutableAction.java | 5 +- .../action/cohere/CohereActionCreator.java | 8 + .../action/cohere/CohereActionVisitor.java | 3 + .../action/cohere/CohereEmbeddingsAction.java | 6 +- .../action/cohere/CohereRerankAction.java | 54 +++++ .../action/huggingface/HuggingFaceAction.java | 6 +- .../openai/OpenAiChatCompletionAction.java | 16 +- .../action/openai/OpenAiEmbeddingsAction.java | 6 +- .../external/http/RequestExecutor.java | 4 +- ...ereEmbeddingsExecutableRequestCreator.java | 1 + .../CohereRerankExecutableRequestCreator.java | 56 +++++ .../http/sender/DocumentsOnlyInput.java | 25 +++ .../http/sender/ExecutableRequestCreator.java | 2 + .../http/sender/HttpRequestSender.java | 30 +-- .../HuggingFaceExecutableRequestCreator.java | 1 + .../external/http/sender/InferenceInputs.java | 10 + .../http/sender/InferenceRequest.java | 5 + .../external/http/sender/NoopTask.java | 5 + ...nAiCompletionExecutableRequestCreator.java | 2 + ...nAiEmbeddingsExecutableRequestCreator.java | 1 + .../http/sender/QueryAndDocsInputs.java | 33 +++ .../http/sender/RequestExecutorService.java | 10 +- .../external/http/sender/RequestTask.java | 19 +- .../external/http/sender/Sender.java | 5 +- .../http/sender/SingleRequestManager.java | 1 + .../cohere/CohereEmbeddingsRequestEntity.java | 2 - .../request/cohere/CohereRerankRequest.java | 96 +++++++++ .../cohere/CohereRerankRequestEntity.java | 60 ++++++ .../external/request/cohere/CohereUtils.java | 1 + .../CohereEmbeddingsResponseEntity.java | 2 +- .../cohere/CohereRankedResponseEntity.java | 151 ++++++++++++++ .../inference/services/SenderService.java | 32 ++- .../inference/services/ServiceUtils.java | 87 ++++++-- .../services/cohere/CohereService.java | 28 ++- .../cohere/rerank/CohereRerankModel.java | 94 +++++++++ 
.../rerank/CohereRerankServiceSettings.java | 95 +++++++++ .../rerank/CohereRerankTaskSettings.java | 184 +++++++++++++++++ .../CustomElandInternalServiceSettings.java | 3 +- .../ElasticsearchInternalService.java | 15 ++ .../services/elser/ElserInternalService.java | 13 ++ .../huggingface/HuggingFaceBaseService.java | 17 +- .../services/openai/OpenAiService.java | 16 +- .../cohere/CohereActionCreatorTests.java | 3 +- .../cohere/CohereEmbeddingsActionTests.java | 15 +- .../HuggingFaceActionCreatorTests.java | 13 +- .../huggingface/HuggingFaceActionTests.java | 7 +- .../openai/OpenAiActionCreatorTests.java | 23 ++- .../OpenAiChatCompletionActionTests.java | 15 +- .../openai/OpenAiEmbeddingsActionTests.java | 13 +- .../sender/ExecutableRequestCreatorTests.java | 6 +- .../http/sender/HttpRequestSenderTests.java | 18 +- .../sender/RequestExecutorServiceTests.java | 59 ++++-- .../http/sender/RequestTaskTests.java | 10 +- .../CohereRankedResponseEntityTests.java | 191 ++++++++++++++++++ .../services/SenderServiceTests.java | 14 ++ .../inference/services/ServiceUtilsTests.java | 16 +- .../services/cohere/CohereServiceTests.java | 11 +- .../ElasticsearchInternalServiceTests.java | 1 + .../elser/ElserInternalServiceTests.java | 1 + .../HuggingFaceBaseServiceTests.java | 2 +- .../huggingface/HuggingFaceServiceTests.java | 4 +- .../services/openai/OpenAiServiceTests.java | 6 +- .../TransportCoordinatedInferenceAction.java | 9 +- 77 files changed, 1876 insertions(+), 185 deletions(-) create mode 100644 docs/changelog/106378.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java diff --git 
a/docs/changelog/106378.yaml b/docs/changelog/106378.yaml new file mode 100644 index 0000000000000..b54760553d184 --- /dev/null +++ b/docs/changelog/106378.yaml @@ -0,0 +1,5 @@ +pr: 106378 +summary: Add Cohere rerank to `_inference` service +area: Machine Learning +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5614d9c1dba12..57a3afe083707 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -162,6 +162,7 @@ static TransportVersion def(int id) { public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); + public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index ccf405e1074e6..8fec9a64bd275 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -11,6 +11,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.Nullable; import java.io.Closeable; import java.util.List; @@ -79,14 +80,16 @@ void parseRequestConfig( /** * Perform inference on the model. * - * @param model The model - * @param input Inference input + * @param model The model + * @param query Query used by rerank task types; null for other task types + * @param input Inference input * @param taskSettings Settings in the request to override the model's defaults - * @param inputType For search, ingest etc - * @param listener Inference result listener + * @param inputType For search, ingest etc + * @param listener Inference result listener */ void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -98,15 +101,17 @@ void infer( * model defaults if {@code chunkingOptions} contains unset * values.
* - * @param model The model - * @param input Inference input - * @param taskSettings Settings in the request to override the model's defaults - * @param inputType For search, ingest etc + * @param model The model + * @param query Query used by rerank task types; null for other task types + * @param input Inference input + * @param taskSettings Settings in the request to override the model's defaults + * @param inputType For search, ingest etc * @param chunkingOptions The window and span options to apply - * @param listener Chunked Inference result listener + * @param listener Chunked Inference result listener */ void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 1e301ad796e90..206cbf074af22 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -21,6 +21,7 @@ public enum TaskType implements Writeable { TEXT_EMBEDDING, SPARSE_EMBEDDING, + RERANK, COMPLETION, ANY { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index f1db2946aa572..f2b4030983db4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1047,6 +1047,10 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static String randomNullOrAlphaOfLength(int codeUnits) { + return randomBoolean() ? null : randomAlphaOfLength(codeUnits); + } + /** * Creates a valid random identifier such as node id or index name */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 4f858a88faa18..22760e6c1f73d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -50,12 +50,14 @@ public static class Request extends ActionRequest { public static final ParseField INPUT = new ParseField("input"); public static final ParseField TASK_SETTINGS = new ParseField("task_settings"); + public static final ParseField QUERY = new ParseField("query"); static final ObjectParser PARSER = new ObjectParser<>(NAME, Request.Builder::new); static { // TODO timeout PARSER.declareStringArray(Request.Builder::setInput, INPUT); PARSER.declareObject(Request.Builder::setTaskSettings, (p, c) -> p.mapOrdered(), TASK_SETTINGS); + PARSER.declareString(Request.Builder::setQuery, QUERY); } private static final EnumSet validEnumsBeforeUnspecifiedAdded = EnumSet.of(InputType.INGEST, InputType.SEARCH); @@ -64,7 +66,7 @@ public static class Request extends ActionRequest { InputType.UNSPECIFIED ); - public static Request parseRequest(String inferenceEntityId, TaskType taskType, XContentParser parser) { + public static Request parseRequest(String inferenceEntityId, TaskType
taskType, private final TaskType taskType; private final String inferenceEntityId; + private final String query; private final List input; private final Map taskSettings; private final InputType inputType; @@ -82,12 +85,14 @@ public static Request parseRequest(String inferenceEntityId, TaskType taskType, public Request( TaskType taskType, String inferenceEntityId, + String query, List input, Map taskSettings, InputType inputType ) { this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; + this.query = query; this.input = input; this.taskSettings = taskSettings; this.inputType = inputType; @@ -108,6 +113,12 @@ public Request(StreamInput in) throws IOException { } else { this.inputType = InputType.UNSPECIFIED; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + this.query = in.readOptionalString(); + } else { + this.query = null; + } } public TaskType getTaskType() { @@ -122,6 +133,10 @@ public List getInput() { return input; } + public String getQuery() { + return query; + } + public Map getTaskSettings() { return taskSettings; } @@ -161,6 +176,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { out.writeEnum(getInputTypeToWrite(inputType, out.getTransportVersion())); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + out.writeOptionalString(query); + } } // default for easier testing @@ -185,12 +204,13 @@ public boolean equals(Object o) { && Objects.equals(inferenceEntityId, request.inferenceEntityId) && Objects.equals(input, request.input) && Objects.equals(taskSettings, request.taskSettings) - && Objects.equals(inputType, request.inputType); + && Objects.equals(inputType, request.inputType) + && Objects.equals(query, request.query); } @Override public int hashCode() { - return Objects.hash(taskType, inferenceEntityId, input, taskSettings, inputType); + return Objects.hash(taskType, inferenceEntityId, input, taskSettings, inputType, query); } public static class Builder { @@ -200,6 +220,7 @@ public static class Builder { private List input; private InputType inputType = InputType.UNSPECIFIED; private Map taskSettings = Map.of(); + private String query; private Builder() {} @@ -218,6 +239,11 @@ public Builder setInput(List input) { return this; } + public Builder setQuery(String query) { + this.query = query; + return this; + } + public Builder setInputType(InputType inputType) { this.inputType = inputType; return this; @@ -229,9 +255,25 @@ public Builder setTaskSettings(Map taskSettings) { } public Request build() { - return new Request(taskType, inferenceEntityId, input, taskSettings, inputType); + return new Request(taskType, inferenceEntityId, query, input, taskSettings, inputType); } } + + public String toString() { + return "InferenceAction.Request(taskType=" + + this.getTaskType() + + ", inferenceEntityId=" + + this.getInferenceEntityId() + + ", query=" + + this.getQuery() + + ", input=" + + this.getInput() + + ", taskSettings=" + + this.getTaskSettings() + + ", inputType=" + + this.getInputType() + + ")"; + } } public static class Response extends ActionResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java new file mode 100644 index 0000000000000..ae96fa6a332bd --- 
/dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class RankedDocsResults implements InferenceServiceResults { + public static final String NAME = "rerank_service_results"; + public static final String RERANK = TaskType.RERANK.toString(); + + List rankedDocs; + + public RankedDocsResults(List rankedDocs) { + this.rankedDocs = rankedDocs; + } + + /** + * A record representing a document that has been ranked by the Cohere rerank API + * @param index the index of the document when it was passed to the Cohere rerank API + * @param relevanceScore the relevance score assigned to the document by the reranker + * @param text the text of the ranked document + */ + public record RankedDoc(String index, String relevanceScore, String text) implements Writeable, ToXContentObject { + + public static final String NAME = "ranked_doc"; + public static final String INDEX = "index"; + public static final String RELEVANCE_SCORE = "relevance_score"; + public static final String TEXT = "text"; + + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(INDEX, index); + builder.field(RELEVANCE_SCORE, relevanceScore); + builder.field(TEXT, text); + + builder.endObject(); + + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(index); + out.writeString(relevanceScore); + out.writeString(text); + } + + public Map asMap() { + return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore, TEXT, text)); + } + + public String toString() { + return "RankedDoc{" + + "index='" + + index + + '\'' + + ", relevanceScore='" + + relevanceScore + + '\'' + + ", text='" + + text + + '\'' + + ", hashcode=" + + hashCode() + + '}'; + } + }; + + public RankedDocsResults() { + this.rankedDocs = new ArrayList(0); + } + + public List getRankedDocs() { + return this.rankedDocs; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(RERANK); + for (RankedDoc rankedDoc : rankedDocs) { + rankedDoc.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(rankedDocs); + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Coordination format not supported by " + NAME); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("Legacy format not supported by " + NAME); + }
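Taken together with the Request changes earlier in this commit, a rerank round trip pairs the new nullable query with the documents to rank, and the service answers with the RankedDocsResults defined above. A sketch using the post-patch constructor order (the endpoint id here is hypothetical):

    InferenceAction.Request request = new InferenceAction.Request(
        TaskType.RERANK,                      // task type added by this commit
        "my-cohere-rerank",                   // hypothetical inference endpoint id
        "what is elasticsearch for?",         // the new nullable query parameter
        List.of("for search", "for logs"),    // documents to rank
        Map.of(),                             // no task_settings overrides
        InputType.UNSPECIFIED
    );
    // The response for TaskType.RERANK carries a RankedDocsResults whose RankedDoc
    // entries keep each document's original position in "index" plus a relevance score.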
+ + @Override + public Map asMap() { + Map map = new LinkedHashMap<>(); + map.put(RERANK, rankedDocs.stream().map(RankedDoc::asMap).collect(Collectors.toList())); + return map; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index 426eebd6340f1..73312974a6323 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -38,6 +38,8 @@ protected InferenceAction.Request createTestInstance() { return new InferenceAction.Request( randomFrom(TaskType.values()), randomAlphaOfLength(6), + randomNullOrAlphaOfLength(10), randomList(1, 5, () -> randomAlphaOfLength(8)), randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))), randomFrom(InputType.values()) @@ -80,13 +82,14 @@ public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { @Override protected InferenceAction.Request mutateInstance(InferenceAction.Request instance) throws IOException { - int select = randomIntBetween(0, 4); + int select = randomIntBetween(0, 5); return switch (select) { case 0 -> { var nextTask = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; yield new InferenceAction.Request( nextTask, instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), instance.getInputType() ); } case 1 -> new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId() + "foo", + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), instance.getInputType() @@ -105,6 +109,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), changedInputs, instance.getTaskSettings(), instance.getInputType() @@ -121,6 +126,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), taskSettings, instance.getInputType() @@ -131,11 +137,22 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), nextInputType ); } + case 5 -> { + yield new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + instance.getQuery() == null ?
randomAlphaOfLength(10) : instance.getQuery() + randomAlphaOfLength(1), + instance.getInput(), + instance.getTaskSettings(), + instance.getInputType() + ); + } default -> throw new UnsupportedOperationException(); }; } @@ -146,6 +163,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput().subList(0, 1), instance.getTaskSettings(), InputType.UNSPECIFIED @@ -154,6 +172,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.UNSPECIFIED @@ -165,6 +184,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.INGEST @@ -174,10 +194,20 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.UNSPECIFIED ); + } else if (version.before(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + return new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + null, + instance.getInput(), + instance.getTaskSettings(), + instance.getInputType() + ); } return instance; @@ -185,20 +215,20 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOException { assertBwcSerialization( - new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED), TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED ); } public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { assertBwcSerialization( - new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED), TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED ); } public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -212,7 +242,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClustering_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLUSTERING); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLUSTERING); InferenceAction.Request 
deserializedInstance = copyWriteable( instance, @@ -226,7 +256,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClassification_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLASSIFICATION); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLASSIFICATION); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -242,7 +272,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd void testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClassification_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLASSIFICATION); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLASSIFICATION); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -258,7 +288,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd void testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClustering_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLUSTERING); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLUSTERING); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -271,7 +301,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd } public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.INGEST); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.INGEST); InferenceAction.Request deserializedInstance = copyWriteable( instance, diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index a9096f9059c5b..ae4a770fe7dd2 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -121,7 +121,7 @@ protected void deleteModel(String modelId, TaskType taskType) throws IOException protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return putModelInternal(endpoint, modelConfig); + return putRequest(endpoint, modelConfig); } /** @@ -129,12 +129,20 @@ protected Map putModel(String modelId, String modelConfig, TaskT */ protected Map putModel(String modelId, String modelConfig) throws IOException { String endpoint = Strings.format("_inference/%s", modelId); - return 
putModelInternal(endpoint, modelConfig); + return putRequest(endpoint, modelConfig); } - private Map putModelInternal(String endpoint, String modelConfig) throws IOException { + Map putRequest(String endpoint, String body) throws IOException { var request = new Request("PUT", endpoint); - request.setJsonEntity(modelConfig); + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + Map postRequest(String endpoint, String body) throws IOException { + var request = new Request("POST", endpoint); + request.setJsonEntity(body); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java new file mode 100644 index 0000000000000..77251ada4c488 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference; + +import org.apache.lucene.tests.util.LuceneTestCase; + +import java.io.IOException; + +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106967") +public class RerankingIT extends InferenceBaseRestTest { + + public void testPutCohereRerankEndpoint() throws IOException { + String endpoint = putCohereRerankEndpoint(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String putCohereRerankEndpoint() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + } + } + """);// TODO remove key + return endpointID; + } + + public void testPutCohereRerankEndpointWithDocuments() throws IOException { + String endpoint = putCohereRerankEndpointWithDocuments(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String putCohereRerankEndpointWithDocuments() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + }, + "task_settings": { + "return_documents": true + } + } + """);// TODO remove key + return endpointID; + } + + public void testPutCohereRerankEndpointWithTop2() throws IOException { + String endpoint = putCohereRerankEndpointWithTop2(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String 
putCohereRerankEndpointWithTop2() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + }, + "task_settings": { + "top_n": 2 + } + } + """);// TODO remove key + return endpointID; + } + + public void postCohereRerankEndpoint(String endpoint, String query, String[] input) throws IOException { + StringBuilder body = new StringBuilder(); + + // Start the JSON object + body.append("{"); + + // Add the query to the JSON object + body.append("\"query\":\"").append(query).append("\","); + + // Start the input array + body.append("\"input\":["); + + // Add each element of the input array to the JSON array + for (int i = 0; i < input.length; i++) { + body.append("\"").append(input[i]).append("\""); + if (i < input.length - 1) { + body.append(","); + } + } + + // End the input array and the JSON object + body.append("]}"); + postRequest("/_inference/rerank/" + endpoint, body.toString()); + } + +} diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 54fe6e01946b4..c53ed82b9fe50 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceExtension; @@ -73,6 +74,7 @@ public void parseRequestConfig( @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -94,6 +96,7 @@ public void infer( @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index e5020774a70f3..30977c23ef5aa 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceExtension; @@ -74,6 +75,7 @@ public
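The postCohereRerankEndpoint helper above builds its JSON body by hand with a StringBuilder. For comparison, a minimal sketch of the same body assembled with the XContentBuilder utilities already available to these tests (XContentFactory and Strings are the standard org.elasticsearch.xcontent and org.elasticsearch.common classes; this variant is illustrative and not part of the patch):

    try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
        builder.startObject();
        builder.field("query", query);
        builder.array("input", input); // writes the String[] as a JSON array
        builder.endObject();
        postRequest("/_inference/rerank/" + endpoint, Strings.toString(builder));
    }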
void parseRequestConfig( @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -93,6 +95,7 @@ public void infer( @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index ece4fee1c935f..a480763f33c47 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -90,6 +90,7 @@ private void inferOnService( ) { service.infer( model, + request.getQuery(), request.getInput(), request.getTaskSettings(), request.getInputType(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 6667e314a62b8..556acfd89c9c6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -105,7 +105,12 @@ protected void masterOperation( String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE); if (serviceName == null) { - listener.onFailure(new ElasticsearchStatusException("Model configuration is missing a service", RestStatus.BAD_REQUEST)); + listener.onFailure( + new ElasticsearchStatusException( + "Model configuration is missing [" + ModelConfigurations.SERVICE + "]", + RestStatus.BAD_REQUEST + ) + ); return; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java index 9991abf71fb12..76e997f248f1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java @@ -9,12 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; - -import java.util.List; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; /** * Defines an inference request to a 3rd party service. The success or failure response is communicated through the provided listener. 
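With the @Nullable query parameter threaded through InferenceService#infer, a rerank call now carries the query alongside the documents, while other task types pass null. A minimal sketch of the two call shapes (service, model, and listener construction assumed):

    // Rerank: the query travels with the documents.
    service.infer(model, "what is elasticsearch for?", List.of("for search", "for logs"), Map.of(), InputType.UNSPECIFIED, listener);

    // Embeddings and other task types: no query.
    service.infer(model, null, List.of("some text to embed"), Map.of(), InputType.INGEST, listener);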
*/ public interface ExecutableAction { - void execute(List input, ActionListener listener); + void execute(InferenceInputs inferenceInputs, ActionListener listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java index 91db5e691cb61..b8e1b34c11f27 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.Map; import java.util.Objects; @@ -34,4 +35,11 @@ public ExecutableAction create(CohereEmbeddingsModel model, Map return new CohereEmbeddingsAction(sender, overriddenModel); } + + @Override + public ExecutableAction create(CohereRerankModel model, Map taskSettings) { + var overriddenModel = CohereRerankModel.of(model, taskSettings); + + return new CohereRerankAction(sender, overriddenModel); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java index cc732e7ab8dc5..5431308850f36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java @@ -10,9 +10,12 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.Map; public interface CohereActionVisitor { ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings, InputType inputType); + + ExecutableAction create(CohereRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index a49fc85200894..712e242e80560 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -12,10 +12,10 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.CohereEmbeddingsExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import 
org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -38,13 +38,13 @@ public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model) { } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException( failedToSendRequestErrorMessage, listener ); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java new file mode 100644 index 0000000000000..7e4edf7c59103 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.CohereRerankExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class CohereRerankAction implements ExecutableAction { + private final String failedToSendRequestErrorMessage; + private final Sender sender; + private final CohereRerankExecutableRequestCreator requestCreator; + + public CohereRerankAction(Sender sender, CohereRerankModel model) { + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + model.getServiceSettings().getCommonSettings().uri(), + "Cohere rerank" + ); + requestCreator = new CohereRerankExecutableRequestCreator(model); + } + + @Override + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException( + failedToSendRequestErrorMessage, + listener + ); + sender.send(requestCreator, inferenceInputs, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + 
listener.onFailure(createInternalServerError(e, failedToSendRequestErrorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java index 928d396c991f8..ca228ed0e906a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -13,11 +13,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.sender.HuggingFaceExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.core.Strings.format; @@ -48,10 +48,10 @@ public HuggingFaceAction( } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index 31fd6a35ef26b..bba51d8e5bd23 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -13,12 +13,13 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionExecutableRequestCreator; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -41,16 +42,21 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model } @Override - public void execute(List input, ActionListener listener) { - if (input.size() > 1) { - listener.onFailure(new ElasticsearchStatusException("OpenAI completions only 
accepts 1 input", RestStatus.BAD_REQUEST)); + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput docsOnlyInput) { + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + } else { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); return; } try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java index d5f083ac8aa90..e9cd81968471d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java @@ -11,12 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.OpenAiEmbeddingsExecutableRequestCreator; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -38,11 +38,11 @@ public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, Servic } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java index 77b4d49d62b9f..7b0287e9652f7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java @@ -12,8 +12,8 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.sender.ExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; -import java.util.List; import 
java.util.concurrent.TimeUnit; public interface RequestExecutor { @@ -29,7 +29,7 @@ public interface RequestExecutor { void execute( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java index ff4f9847da8a1..6488996d2edc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java @@ -42,6 +42,7 @@ public CohereEmbeddingsExecutableRequestCreator(CohereEmbeddingsModel model) { @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java new file mode 100644 index 0000000000000..432a5334ac001 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.cohere.CohereResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.cohere.CohereRerankRequest; +import org.elasticsearch.xpack.inference.external.response.cohere.CohereRankedResponseEntity; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class CohereRerankExecutableRequestCreator implements ExecutableRequestCreator { + private static final Logger logger = LogManager.getLogger(CohereRerankExecutableRequestCreator.class); + private static final ResponseHandler HANDLER = createCohereResponseHandler(); + + private static ResponseHandler createCohereResponseHandler() { + return new CohereResponseHandler("cohere rerank", (request, response) -> CohereRankedResponseEntity.fromResponse(response)); + } + + private final CohereAccount account; + private final CohereRerankModel model; + + public CohereRerankExecutableRequestCreator(CohereRerankModel model) { + this.model = Objects.requireNonNull(model); + account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); + } + + @Override + public Runnable create( + String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + CohereRerankRequest request = new CohereRerankRequest(account, query, input, model); + + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java new file mode 100644 index 0000000000000..a11be003585fd --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
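The request creator above is the piece the sender layer drives: given a query and a list of documents, it produces a runnable HTTP request. A rough usage sketch (rerankModel, requestSender, and listener are assumed; in the shipped code this call is made by SingleRequestManager, as its change later in this patch shows):

    var creator = new CohereRerankExecutableRequestCreator(rerankModel);
    Runnable request = creator.create(
        "what is the capital of the United States?",
        List.of("Carson City is the capital city of the American state of Nevada.", "Washington, D.C. is the capital of the United States."),
        requestSender,
        () -> false, // hasRequestCompletedFunction: nothing has completed yet
        HttpClientContext.create(),
        listener
    );
    request.run(); // builds the CohereRerankRequest and hands it to the request sender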
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import java.util.List; +import java.util.Objects; + +public class DocumentsOnlyInput extends InferenceInputs { + + private final List input; + + public DocumentsOnlyInput(List chunks) { + this.input = Objects.requireNonNull(chunks); + } + + public List getInputs() { + return this.input; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java index 96455ca4b1cb1..dc279573d5c92 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java @@ -9,6 +9,7 @@ import org.apache.http.client.protocol.HttpClientContext; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; @@ -20,6 +21,7 @@ */ public interface ExecutableRequestCreator { Runnable create( + @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 0131bf2989f6f..cd6658d3d70d7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -147,21 +147,22 @@ public void close() throws IOException { /** * Send a request at some point in the future. The timeout used is retrieved from the settings. - * @param requestCreator a factory for creating a request to be sent to a 3rd party service - * @param input the list of string input to send in the request - * @param timeout the maximum time the request should wait for a response before timing out. If null, the timeout is ignored. - * The queuing logic may still throw a timeout if it fails to send the request because it couldn't get a leased - * @param listener a listener to handle the response + * + * @param requestCreator a factory for creating a request to be sent to a 3rd party service + * @param inferenceInputs the inference inputs to send in the request + * @param timeout the maximum time the request should wait for a response before timing out. If null, the timeout is ignored.
+ * The queuing logic may still throw a timeout if it fails to send the request because it couldn't get a lease + * @param listener a listener to handle the response */ public void send( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); - service.execute(requestCreator, input, timeout, listener); + service.execute(requestCreator, inferenceInputs, timeout, listener); } private void waitForStartToComplete() { @@ -176,14 +177,19 @@ private void waitForStartToComplete() { /** * Send a request at some point in the future. The timeout used is retrieved from the settings. - * @param requestCreator a factory for creating a request to be sent to a 3rd party service - * @param input the list of string input to send in the request - * @param listener a listener to handle the response + * + * @param requestCreator a factory for creating a request to be sent to a 3rd party service + * @param inferenceInputs the inference inputs to send in the request + * @param listener a listener to handle the response */ - public void send(ExecutableRequestCreator requestCreator, List input, ActionListener listener) { + public void send( + ExecutableRequestCreator requestCreator, + InferenceInputs inferenceInputs, + ActionListener listener + ) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); - service.execute(requestCreator, input, maxRequestTimeout, listener); + service.execute(requestCreator, inferenceInputs, maxRequestTimeout, listener); } public static List> getSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java index 62558fe6071ac..7c70f738105d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java @@ -42,6 +42,7 @@ public HuggingFaceExecutableRequestCreator(HuggingFaceModel model, ResponseHandl @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java new file mode 100644 index 0000000000000..d7e07e734ce80 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +public abstract class InferenceInputs {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java index ed77e4b207a94..5d5e8df40c22d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java @@ -23,6 +23,11 @@ public interface InferenceRequest { */ ExecutableRequestCreator getRequestCreator(); + /** + * Returns the query associated with this request. Used for Rerank tasks. + */ + String getQuery(); + /** * Returns the text input associated with this request. */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java index 6cdcd38d224a9..cca00b2e9bf58 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java @@ -20,6 +20,11 @@ public ExecutableRequestCreator getRequestCreator() { return null; } + @Override + public String getQuery() { + return null; + } + @Override public List getInput() { return null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java index 44ab670843335..853038e1a7ca4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -45,6 +46,7 @@ public OpenAiCompletionExecutableRequestCreator(OpenAiChatCompletionModel model) @Override public Runnable create( + @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java index 708e67944441c..8f867c374e2d3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java @@ -53,6 +53,7 @@ public 
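InferenceInputs itself is deliberately an empty marker type: producers pick a concrete subtype and consumers narrow it back down. A minimal sketch of both halves, using DocumentsOnlyInput from above and the QueryAndDocsInputs subtype introduced below (handle is a hypothetical stand-in for whatever the consumer does with the narrowed values; the RequestTask change later in this patch performs essentially this narrowing):

    InferenceInputs inputs = (query == null) ? new DocumentsOnlyInput(documents) : new QueryAndDocsInputs(query, documents);

    if (inputs instanceof QueryAndDocsInputs queryAndDocs) {
        handle(queryAndDocs.getQuery(), queryAndDocs.getChunks());
    } else if (inputs instanceof DocumentsOnlyInput docsOnly) {
        handle(null, docsOnly.getInputs());
    }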
OpenAiEmbeddingsExecutableRequestCreator(OpenAiEmbeddingsModel model, Tru @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java new file mode 100644 index 0000000000000..4d24598d67831 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import java.util.List; +import java.util.Objects; + +public class QueryAndDocsInputs extends InferenceInputs { + + private final String query; + private final List chunks; + + public QueryAndDocsInputs(String query, List chunks) { + this.query = Objects.requireNonNull(query); + this.chunks = Objects.requireNonNull(chunks); + } + + public String getQuery() { + return query; + } + + public List getChunks() { + return chunks; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index ecbaf26ea17f4..0a5ab8f87ef1b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -258,7 +258,7 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE * Execute the request at some point in the future. * * @param requestCreator the http request to send - * @param input the text to perform inference on + * @param inferenceInputs the inputs to send in the request * @param timeout the maximum time to wait for this request to complete (failing or succeeding). Once the time elapses, the * listener::onFailure is called with a {@link org.elasticsearch.ElasticsearchTimeoutException}.
* If null, then the request will wait forever @@ -266,13 +266,13 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE */ public void execute( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ) { var task = new RequestTask( requestCreator, - input, + inferenceInputs, timeout, threadPool, // TODO when multi-tenancy (as well as batching) is implemented we need to be very careful that we preserve @@ -280,6 +280,10 @@ public void execute( ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext()) ); + completeExecution(task); + } + + private void completeExecution(RequestTask task) { if (isShutdown()) { EsRejectedExecutionException rejected = new EsRejectedExecutionException( format("Failed to enqueue task because the http executor service [%s] has already shutdown", serviceName), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 970366f7765dd..6628b9ef425e2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -27,19 +27,29 @@ class RequestTask implements RejectableTask { private final AtomicBoolean finished = new AtomicBoolean(); private final ExecutableRequestCreator requestCreator; + private final String query; private final List input; private final ActionListener listener; RequestTask( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ThreadPool threadPool, ActionListener listener ) { this.requestCreator = Objects.requireNonNull(requestCreator); - this.input = Objects.requireNonNull(input); this.listener = getListener(Objects.requireNonNull(listener), timeout, Objects.requireNonNull(threadPool)); + + if (inferenceInputs instanceof QueryAndDocsInputs) { + this.query = ((QueryAndDocsInputs) inferenceInputs).getQuery(); + this.input = ((QueryAndDocsInputs) inferenceInputs).getChunks(); + } else if (inferenceInputs instanceof DocumentsOnlyInput) { + this.query = null; + this.input = ((DocumentsOnlyInput) inferenceInputs).getInputs(); + } else { + throw new IllegalArgumentException("Unsupported inference inputs type: " + inferenceInputs.getClass()); + } } private ActionListener getListener( @@ -85,6 +95,11 @@ public List getInput() { return input; } + @Override + public String getQuery() { + return query; + } + @Override public ActionListener getListener() { return listener; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 0272f4b0e351c..3902a154b2b99 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -13,17 +13,16 @@ import org.elasticsearch.inference.InferenceServiceResults; import java.io.Closeable; -import java.util.List; public interface Sender extends Closeable { void start(); void send( ExecutableRequestCreator requestCreator, - List 
input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ); - void send(ExecutableRequestCreator requestCreator, List input, ActionListener listener); + void send(ExecutableRequestCreator requestCreator, InferenceInputs inferenceInputs, ActionListener listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java index ecd12814d0877..494c77964080f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java @@ -30,6 +30,7 @@ public void execute(InferenceRequest inferenceRequest, HttpClientContext context inferenceRequest.getRequestCreator() .create( + inferenceRequest.getQuery(), inferenceRequest.getInput(), requestSender, inferenceRequest.getRequestCompletedFunction(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 035bd44ebf405..6e389e8537d27 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -33,9 +33,7 @@ public record CohereEmbeddingsRequestEntity( private static final String SEARCH_QUERY = "search_query"; private static final String CLUSTERING = "clustering"; private static final String CLASSIFICATION = "classification"; - private static final String TEXTS_FIELD = "texts"; - static final String INPUT_TYPE_FIELD = "input_type"; static final String EMBEDDING_TYPES_FIELD = "embedding_types"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java new file mode 100644 index 0000000000000..b8f3916582bf2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
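Taken together with the Sender and SingleRequestManager changes above, a rerank round trip through the sender layer now looks roughly like this (sender startup, rerankModel, and listener omitted; a sketch rather than code from this patch):

    var inputs = new QueryAndDocsInputs(
        "what is the capital of the United States?",
        List.of("Carson City is the capital city of the American state of Nevada.", "Washington, D.C. is the capital of the United States.")
    );
    sender.send(new CohereRerankExecutableRequestCreator(rerankModel), inputs, listener);
    // RequestTask pulls the query and chunks back out and passes them to the request creator.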
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; + +public class CohereRerankRequest implements Request { + + private final CohereAccount account; + private final String query; + private final List input; + private final URI uri; + private final CohereRerankTaskSettings taskSettings; + private final String model; + private final String inferenceEntityId; + + public CohereRerankRequest(CohereAccount account, String query, List input, CohereRerankModel model) { + Objects.requireNonNull(model); + + this.account = Objects.requireNonNull(account); + this.input = Objects.requireNonNull(input); + this.query = Objects.requireNonNull(query); + uri = buildUri(this.account.url(), "Cohere", CohereRerankRequest::buildDefaultUri); + taskSettings = model.getTaskSettings(); + this.model = model.getServiceSettings().getCommonSettings().modelId(); + inferenceEntityId = model.getInferenceEntityId(); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString(new CohereRerankRequestEntity(query, input, taskSettings, model)).getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); + + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + httpPost.setHeader(CohereUtils.createRequestSourceHeader()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return inferenceEntityId; + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + return this; // TODO? 
+ } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + + // default for testing + static URI buildDefaultUri() throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(CohereUtils.HOST) + .setPathSegments(CohereUtils.VERSION_1, CohereUtils.RERANK_PATH) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java new file mode 100644 index 0000000000000..e7abe0990eb0c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record CohereRerankRequestEntity(String model, String query, List documents, CohereRerankTaskSettings taskSettings) + implements + ToXContentObject { + + private static final String DOCUMENTS_FIELD = "documents"; + private static final String QUERY_FIELD = "query"; + private static final String MODEL_FIELD = "model"; + + public CohereRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(documents); + Objects.requireNonNull(taskSettings); + } + + public CohereRerankRequestEntity(String query, List input, CohereRerankTaskSettings taskSettings, String model) { + this(model, query, input, taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(MODEL_FIELD, model); + builder.field(QUERY_FIELD, query); + builder.field(DOCUMENTS_FIELD, documents); + + if (taskSettings.getDoesReturnDocuments() != null) { + builder.field(CohereRerankTaskSettings.RETURN_DOCUMENTS, taskSettings.getDoesReturnDocuments()); + } + + if (taskSettings.getTopNDocumentsOnly() != null) { + builder.field(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, taskSettings.getTopNDocumentsOnly()); + } + + if (taskSettings.getMaxChunksPerDoc() != null) { + builder.field(CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, taskSettings.getMaxChunksPerDoc()); + } + + builder.endObject(); + return builder; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java index e54328df1dbf7..e6344f4d17b40 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java @@ -14,6 +14,7 @@ public class CohereUtils { public static final String HOST = "api.cohere.ai"; public static final String VERSION_1 = "v1"; public static final String EMBEDDINGS_PATH = 
"embed"; + public static final String RERANK_PATH = "rerank"; public static final String REQUEST_SOURCE_HEADER = "Request-Source"; public static final String ELASTIC_REQUEST_SOURCE = "unspecified:elasticsearch"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index 9221e5c5deed8..fabd96b543594 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -50,7 +50,7 @@ private static String supportedEmbeddingTypes() { } /** - * Parses the OpenAI json response. + * Parses the Cohere embed json response. * For a request like: * *
    diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java
    new file mode 100644
    index 0000000000000..8574fb2ba520f
    --- /dev/null
    +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java
    @@ -0,0 +1,151 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0; you may not use this file except in compliance with the Elastic License
    + * 2.0.
    + *
    + * this file was contributed to by a generative AI
    + */
    +
    +package org.elasticsearch.xpack.inference.external.response.cohere;
    +
    +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
    +import org.elasticsearch.common.xcontent.XContentParserUtils;
    +import org.elasticsearch.inference.InferenceServiceResults;
    +import org.elasticsearch.xcontent.XContentFactory;
    +import org.elasticsearch.xcontent.XContentParser;
    +import org.elasticsearch.xcontent.XContentParserConfiguration;
    +import org.elasticsearch.xcontent.XContentType;
    +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
    +import org.elasticsearch.xpack.inference.external.http.HttpResult;
    +
    +import java.io.IOException;
    +
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList;
    +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
    +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken;
    +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField;
    +
    +public class CohereRankedResponseEntity {
    +
    +    /**
    +     * Parses the Cohere ranked response.
    +     *
    +     * For a request like:
    +     *     "model": "rerank-english-v2.0",
    +     *     "query": "What is the capital of the United States?",
    +     *     "return_documents": true,
    +     *     "top_n": 3,
    +     *     "documents": ["Carson City is the capital city of the American state of Nevada.",
    +     *                   "The Commonwealth of the Northern Mariana ... Its capital is Saipan.",
    +     *                   "Washington, D.C. (also known as simply Washington or D.C., ... It is a federal district.",
    +     *                   "Capital punishment (the death penalty) ... As of 2017, capital punishment is legal in 30 of the 50 states."]
+     *
    + * The response will look like (without whitespace): + * { + * "id": "1983d114-a6e8-4940-b121-eb4ac3f6f703", + * "results": [ + * { + * "document": { + * "text": "Washington, D.C. is the capital of the United States. It is a federal district." + * }, + * "index": 2, + * "relevance_score": 0.98005307 + * }, + * { + * "document": { + * "text": "Capital punishment (the death penalty) As of 2017, capital punishment is legal in 30 of the 50 states." + * }, + * "index": 3, + * "relevance_score": 0.27904198 + * }, + * { + * "document": { + * "text": "Carson City is the capital city of the American state of Nevada." + * }, + * "index": 0, + * "relevance_score": 0.10194652 + * } + * ], + * "meta": { + * "api_version": { + * "version": "1" + * }, + * "billed_units": { + * "search_units": 1 + * } + * } + * + * @param response the http response from cohere + * @return the parsed response + * @throws IOException if there is an error parsing the response + */ + public static InferenceServiceResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return new RankedDocsResults(parseList(jsonParser, CohereRankedResponseEntity::parseRankedDocObject)); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. 
The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Cohere response"); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + Integer index = null; + Float relevanceScore = null; + String documentText = null; + parser.nextToken(); + while (parser.currentToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + switch (parser.currentName()) { + case "index": + parser.nextToken(); // move to VALUE_NUMBER + index = parser.intValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "relevance_score": + parser.nextToken(); // move to VALUE_NUMBER + relevanceScore = parser.floatValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "document": + parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + do { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { + parser.nextToken(); // move to VALUE_STRING + documentText = parser.text(); + } + } while (parser.nextToken() != XContentParser.Token.END_OBJECT); + parser.nextToken(); // move past END_OBJECT + // parser should now be at the next FIELD_NAME or END_OBJECT + break; + default: + XContentParserUtils.throwUnknownField(parser.currentName(), parser); + } + } else { + parser.nextToken(); + } + } + return new RankedDocsResults.RankedDoc(String.valueOf(index), String.valueOf(relevanceScore), String.valueOf(documentText)); + } + + private CohereRankedResponseEntity() {} + + static String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Cohere rerank response"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index 98b004cd1aa7f..60fc219ba5c66 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceService; @@ -44,19 +45,36 @@ protected ServiceComponents getServiceComponents() { @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, ActionListener listener ) { init(); + if (query != null) { + doInfer(model, query, input, taskSettings, inputType, listener); + } else { + doInfer(model, input, taskSettings, inputType, listener); + } + } - doInfer(model, input, taskSettings, inputType, listener); + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + init(); + chunkedInfer(model, null, input, taskSettings, inputType,
chunkingOptions, listener); } @Override public void chunkedInfer( Model model, + @Nullable String query, List<String> input, Map<String, Object> taskSettings, InputType inputType, @@ -64,7 +82,7 @@ public void chunkedInfer( ActionListener<List<ChunkedInferenceServiceResults>> listener ) { init(); - doChunkedInfer(model, input, taskSettings, inputType, chunkingOptions, listener); + doChunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); } protected abstract void doInfer( @@ -75,8 +93,18 @@ protected abstract void doInfer( ActionListener<InferenceServiceResults> listener ); + protected abstract void doInfer( + Model model, + String query, + List<String> input, + Map<String, Object> taskSettings, + InputType inputType, + ActionListener<InferenceServiceResults> listener + ); + protected abstract void doChunkedInfer( Model model, + @Nullable String query, List<String> input, Map<String, Object> taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 96846f3f71142..11f8c6f53fb7b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -121,6 +121,10 @@ public static String mustBeNonEmptyString(String settingName, String scope) { return Strings.format("[%s] Invalid value empty string. [%s] must be a non-empty string", scope, settingName); } + public static String mustBeNonNull(String settingName, String scope) { + return Strings.format("[%s] Invalid value. [%s] must be non-null", scope, settingName); + } + public static String invalidValue(String settingName, String scope, String invalidType, String[] requiredValues) { var copyOfRequiredValues = requiredValues.clone(); Arrays.sort(copyOfRequiredValues); @@ -232,6 +236,25 @@ public static String extractOptionalString( return optionalField; } + public static Integer extractOptionalPositiveInteger( + Map<String, Object> map, + String settingName, + String scope, + ValidationException validationException + ) { + Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class); + + if (optionalField != null && optionalField <= 0) { + validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(settingName, optionalField)); + } + + if (validationException.validationErrors().isEmpty() == false) { + return null; + } + + return optionalField; + } + public static <E extends Enum<E>> E extractOptionalEnum( Map<String, Object> map, String settingName, @@ -259,6 +282,21 @@ public static <E extends Enum<E>> E extractOptionalEnum( return null; } + public static Boolean extractOptionalBoolean( + Map<String, Object> map, + String settingName, + String scope, + ValidationException validationException + ) { + Boolean optionalField = ServiceUtils.removeAsType(map, settingName, Boolean.class); + + if (validationException.validationErrors().isEmpty() == false) { + return null; + } + + return optionalField; + } + private static <E extends Enum<E>> void validateEnumValue(E enumValue, EnumSet<E> validValues) { if (validValues.contains(enumValue) == false) { throw new IllegalArgumentException(Strings.format("Enum value [%s] is not one of the acceptable values", enumValue.toString())); @@ -310,27 +348,36 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod public static void getEmbeddingSize(Model model, InferenceService service, ActionListener<Integer> listener) { assert model.getTaskType() == TaskType.TEXT_EMBEDDING; - service.infer(model,
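The optional-extraction helpers added to ServiceUtils above can be exercised with a small, mutable settings map; a minimal sketch, with illustrative keys and scope string:

```java
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.xpack.inference.services.ServiceUtils;

import java.util.HashMap;
import java.util.Map;

class OptionalExtractionSketch {
    static void demo() {
        Map<String, Object> settings = new HashMap<>(Map.of("top_n", 3, "return_documents", true));
        ValidationException errors = new ValidationException();

        // Removes "top_n" from the map; a value <= 0 records a validation error,
        // and null is returned whenever any error has been recorded so far.
        Integer topN = ServiceUtils.extractOptionalPositiveInteger(settings, "top_n", "task_settings", errors);

        // Removes "return_documents"; null means the key was absent.
        Boolean returnDocs = ServiceUtils.extractOptionalBoolean(settings, "return_documents", "task_settings", errors);

        if (errors.validationErrors().isEmpty() == false) {
            throw errors; // ValidationException is a RuntimeException in Elasticsearch
        }
    }
}
```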
List.of(TEST_EMBEDDING_INPUT), Map.of(), InputType.INGEST, listener.delegateFailureAndWrap((delegate, r) -> { - if (r instanceof TextEmbedding embeddingResults) { - try { - delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); - } catch (Exception e) { - delegate.onFailure(new ElasticsearchStatusException("Could not determine embedding size", RestStatus.BAD_REQUEST, e)); + service.infer( + model, + null, + List.of(TEST_EMBEDDING_INPUT), + Map.of(), + InputType.INGEST, + listener.delegateFailureAndWrap((delegate, r) -> { + if (r instanceof TextEmbedding embeddingResults) { + try { + delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); + } catch (Exception e) { + delegate.onFailure( + new ElasticsearchStatusException("Could not determine embedding size", RestStatus.BAD_REQUEST, e) + ); + } + } else { + delegate.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size. " + + "Expected a result of type [" + + TextEmbeddingResults.NAME + + "] got [" + + r.getWriteableName() + + "]", + RestStatus.BAD_REQUEST + ) + ); } - } else { - delegate.onFailure( - new ElasticsearchStatusException( - "Could not determine embedding size. " - + "Expected a result of type [" - + TextEmbeddingResults.NAME - + "] got [" - + r.getWriteableName() - + "]", - RestStatus.BAD_REQUEST - ) - ); - } - })); + }) + ); } private static final String TEST_EMBEDDING_INPUT = "how big"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 28ccccecb9627..40f3bcda57a47 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -29,13 +29,16 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.QueryAndDocsInputs; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.List; import java.util.Map; @@ -130,6 +133,7 @@ private static CohereModel createModel( secretSettings, context ); + case RERANK -> new CohereRerankModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings, context); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @@ -173,6 +177,7 @@ public CohereModel parsePersistedConfig(String inferenceEntityId, TaskType taskT @Override public void doInfer( Model model, + String query, List input, Map taskSettings, InputType inputType, @@ -187,12 
+192,33 @@ public void doInfer( var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); var action = cohereModel.accept(actionCreator, taskSettings, inputType); - action.execute(input, listener); + action.execute(new QueryAndDocsInputs(query, input), listener); + } + + @Override + public void doInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + if (model instanceof CohereModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + CohereModel cohereModel = (CohereModel) model; + var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); + + var action = cohereModel.accept(actionCreator, taskSettings, inputType); + action.execute(new DocumentsOnlyInput(input), listener); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java new file mode 100644 index 0000000000000..78e0e419c418d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.cohere.CohereModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +public class CohereRerankModel extends CohereModel { + public static CohereRerankModel of(CohereRerankModel model, Map taskSettings) { + var requestTaskSettings = CohereRerankTaskSettings.fromMap(taskSettings); + return new CohereRerankModel(model, CohereRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public CohereRerankModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets, + ConfigurationParseContext context + ) { + this( + modelId, + taskType, + service, + CohereRerankServiceSettings.fromMap(serviceSettings, context), + CohereRerankTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + // should only be used for testing + CohereRerankModel( + String modelId, + TaskType taskType, + String service, + CohereRerankServiceSettings serviceSettings, + CohereRerankTaskSettings taskSettings, + @Nullable DefaultSecretSettings secretSettings + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + } + + private 
CohereRerankModel(CohereRerankModel model, CohereRerankTaskSettings taskSettings) { + super(model, taskSettings); + } + + public CohereRerankModel(CohereRerankModel model, CohereRerankServiceSettings serviceSettings) { + super(model, serviceSettings); + } + + @Override + public CohereRerankServiceSettings getServiceSettings() { + return (CohereRerankServiceSettings) super.getServiceSettings(); + } + + @Override + public CohereRerankTaskSettings getTaskSettings() { + return (CohereRerankTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + /** + * Accepts a visitor to create an executable action. The returned action will not return documents in the response. + * @param visitor the visitor that builds the executable action + * @param taskSettings the per-request task settings overrides + * @param inputType ignored for the rerank task + * @return the rerank action + */ + @Override + public ExecutableAction accept(CohereActionVisitor visitor, Map<String, Object> taskSettings, InputType inputType) { + return visitor.create(this, taskSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java new file mode 100644 index 0000000000000..a14ffb7ef8216 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
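A minimal sketch of the double dispatch in accept(...) above, assuming a sender, service components, and listener of the kind CohereService normally supplies:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.inference.InputType;
import org.elasticsearch.xpack.inference.external.action.ExecutableAction;
import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator;
import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor;
import org.elasticsearch.xpack.inference.external.http.sender.QueryAndDocsInputs;
import org.elasticsearch.xpack.inference.external.http.sender.Sender;
import org.elasticsearch.xpack.inference.services.ServiceComponents;
import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel;

import java.util.List;
import java.util.Map;

class RerankDispatchSketch {
    static void rerank(CohereRerankModel model, Sender sender, ServiceComponents components,
                       ActionListener<InferenceServiceResults> listener) {
        CohereActionVisitor visitor = new CohereActionCreator(sender, components);
        // inputType is ignored for rerank, per the javadoc above.
        ExecutableAction action = model.accept(visitor, Map.of("top_n", 5), InputType.UNSPECIFIED);
        action.execute(new QueryAndDocsInputs("what is the capital of the US?", List.of("doc a", "doc b")), listener);
    }
}
```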
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class CohereRerankServiceSettings implements ServiceSettings { + public static final String NAME = "cohere_rerank_service_settings"; + + public static CohereRerankServiceSettings fromMap(Map map, ConfigurationParseContext parseContext) { + ValidationException validationException = new ValidationException(); + var commonServiceSettings = CohereServiceSettings.fromMap(map, parseContext); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new CohereRerankServiceSettings(commonServiceSettings); + } + + private final CohereServiceSettings commonSettings; + + public CohereRerankServiceSettings(CohereServiceSettings commonSettings) { + this.commonSettings = commonSettings; + } + + public CohereRerankServiceSettings(StreamInput in) throws IOException { + commonSettings = new CohereServiceSettings(in); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + commonSettings.toXContentFragment(builder); + + builder.endObject(); + return builder; + } + + @Override + public ToXContentObject getFilteredXContentObject() { + return this; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + commonSettings.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereRerankServiceSettings that = (CohereRerankServiceSettings) o; + return Objects.equals(commonSettings, that.commonSettings); + } + + @Override + public int hashCode() { + return Objects.hash(commonSettings); + } + + public CohereServiceSettings getCommonSettings() { + return commonSettings; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java new file mode 100644 index 0000000000000..75588aa2b5036 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
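Because the class above just wraps the shared CohereServiceSettings, wire serialization is pure delegation; a hypothetical round-trip sketch using Elasticsearch's in-memory stream:

```java
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankServiceSettings;

import java.io.IOException;

class RerankSettingsRoundTripSketch {
    static CohereRerankServiceSettings roundTrip(CohereRerankServiceSettings settings) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            settings.writeTo(out); // delegates to commonSettings.writeTo
            return new CohereRerankServiceSettings(out.bytes().streamInput());
        }
    }
}
```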
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +/** + * Defines the task settings for the cohere rerank service. + * + *

+ * See the Cohere rerank API docs for details.
    + */ +public class CohereRerankTaskSettings implements TaskSettings { + + public static final String NAME = "cohere_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + public static final String TOP_N_DOCS_ONLY = "top_n"; + public static final String MAX_CHUNKS_PER_DOC = "max_chunks_per_doc"; + + static final CohereRerankTaskSettings EMPTY_SETTINGS = new CohereRerankTaskSettings(null, null, null); + + public static CohereRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return EMPTY_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, ModelConfigurations.TASK_SETTINGS, validationException); + Integer topNDocumentsOnly = extractOptionalPositiveInteger( + map, + TOP_N_DOCS_ONLY, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer maxChunksPerDoc = extractOptionalPositiveInteger( + map, + MAX_CHUNKS_PER_DOC, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return of(topNDocumentsOnly, returnDocuments, maxChunksPerDoc); + } + + /** + * Creates a new {@link CohereRerankTaskSettings} by preferring non-null fields from the request settings over the original settings. + * + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @return a constructed {@link CohereRerankTaskSettings} + */ + public static CohereRerankTaskSettings of(CohereRerankTaskSettings originalSettings, CohereRerankTaskSettings requestTaskSettings) { + return new CohereRerankTaskSettings( + requestTaskSettings.getTopNDocumentsOnly() != null + ? requestTaskSettings.getTopNDocumentsOnly() + : originalSettings.getTopNDocumentsOnly(), + requestTaskSettings.getReturnDocuments() != null + ? requestTaskSettings.getReturnDocuments() + : originalSettings.getReturnDocuments(), + requestTaskSettings.getMaxChunksPerDoc() != null + ? 
requestTaskSettings.getMaxChunksPerDoc() + : originalSettings.getMaxChunksPerDoc() + ); + } + + public static CohereRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments, Integer maxChunksPerDoc) { + return new CohereRerankTaskSettings(topNDocumentsOnly, returnDocuments, maxChunksPerDoc); + } + + private final Integer topNDocumentsOnly; + private final Boolean returnDocuments; + private final Integer maxChunksPerDoc; + + public CohereRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalInt(), in.readOptionalBoolean(), in.readOptionalInt()); + } + + public CohereRerankTaskSettings( + @Nullable Integer topNDocumentsOnly, + @Nullable Boolean doReturnDocuments, + @Nullable Integer maxChunksPerDoc + ) { + this.topNDocumentsOnly = topNDocumentsOnly; + this.returnDocuments = doReturnDocuments; + this.maxChunksPerDoc = maxChunksPerDoc; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (topNDocumentsOnly != null) { + builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly); + } + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + if (maxChunksPerDoc != null) { + builder.field(MAX_CHUNKS_PER_DOC, maxChunksPerDoc); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalInt(topNDocumentsOnly); + out.writeOptionalBoolean(returnDocuments); + out.writeOptionalInt(maxChunksPerDoc); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereRerankTaskSettings that = (CohereRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments) + && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly) + && Objects.equals(maxChunksPerDoc, that.maxChunksPerDoc); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments, topNDocumentsOnly, maxChunksPerDoc); + } + + public static String invalidInputTypeMessage(InputType inputType) { + return Strings.format("received invalid input type value [%s]", inputType.toString()); + } + + public Boolean getDoesReturnDocuments() { + return returnDocuments; + } + + public Integer getTopNDocumentsOnly() { + return topNDocumentsOnly; + } + + public Boolean getReturnDocuments() { + return returnDocuments; + } + + public Integer getMaxChunksPerDoc() { + return maxChunksPerDoc; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index ee22d51914b15..86ac5bbaaa272 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -46,7 +46,8 @@ public static Builder fromMap(Map map) { validateParameters(numAllocations, validationException, numThreads); - String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, "ServiceSettings", 
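The override semantics of CohereRerankTaskSettings.of(originalSettings, requestTaskSettings) defined above can be illustrated with made-up values:

```java
import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings;

import java.util.HashMap;
import java.util.Map;

class RerankTaskSettingsMergeSketch {
    static void demo() {
        // Stored settings: top_n=10, return_documents=false, max_chunks_per_doc unset.
        CohereRerankTaskSettings original = new CohereRerankTaskSettings(10, false, null);
        // The request overrides only top_n.
        CohereRerankTaskSettings request = CohereRerankTaskSettings.fromMap(
            new HashMap<>(Map.of(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, 3))
        );

        CohereRerankTaskSettings merged = CohereRerankTaskSettings.of(original, request);
        // merged: top_n=3 (request wins), return_documents=false (falls back to the
        // original), max_chunks_per_doc=null (unset in both).
    }
}
```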
validationException); + String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, "ServiceSettings", validationException); // TODO check if this is + // the correct scope if (validationException.validationErrors().isEmpty() == false) { throw validationException; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index a9dc487bfca8a..3f91bcfe648e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -14,6 +14,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -210,6 +211,7 @@ public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskTyp @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -236,9 +238,22 @@ public void infer( ); } + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + + chunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); + } + @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index bb88193612ff4..d6323635f0cec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -256,6 +256,7 @@ public void stop(String inferenceEntityId, ActionListener listener) { @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -283,9 +284,21 @@ public void infer( ); } + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + @Nullable ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + chunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); + } + @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index 7075ff63d60a7..1225c471ec9c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.huggingface; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceResults; @@ -23,6 +24,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -122,12 +124,25 @@ public void doInfer( var actionCreator = new HuggingFaceActionCreator(getSender(), getServiceComponents()); var action = huggingFaceModel.accept(actionCreator); - action.execute(input, listener); + action.execute(new DocumentsOnlyInput(input), listener); + } + + @Override + protected void doInfer( + Model model, + String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + throw new UnsupportedOperationException("Hugging Face service does not support inference with query input"); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 1a47f00519cb6..e30d6f2eae592 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; @@ -202,12 +203,25 @@ public void doInfer( var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); var action = openAiModel.accept(actionCreator, taskSettings); - action.execute(input, listener); + action.execute(new DocumentsOnlyInput(input), listener); + } + + @Override + protected void doInfer( + Model model, + String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + throw new UnsupportedOperationException("OpenAI service does not support inference with query input"); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git 
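With both doInfer overloads in place, SenderService routes on whether a query is present; a caller-side sketch with a hypothetical model and listener:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.inference.InferenceServiceResults;
import org.elasticsearch.inference.InputType;
import org.elasticsearch.inference.Model;
import org.elasticsearch.xpack.inference.services.SenderService;

import java.util.List;
import java.util.Map;

class InferDispatchSketch {
    static void demo(SenderService service, Model model, ActionListener<InferenceServiceResults> listener) {
        // No query: routed to the documents-only doInfer overload (embeddings, completion, ...).
        service.infer(model, null, List.of("text to embed"), Map.of(), InputType.INGEST, listener);

        // With a query: routed to the query-aware overload; the Hugging Face and OpenAI
        // services above reject this path with UnsupportedOperationException.
        service.infer(model, "a search query", List.of("candidate doc"), Map.of(), InputType.SEARCH, listener);
    }
}
```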
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java index 66ef9910a2649..3b3b1539367ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; @@ -113,7 +114,7 @@ public void testCreate_CohereEmbeddingsModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings, InputType.UNSPECIFIED); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java index b504744bfe5f3..b5220fbc0960e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.external.request.cohere.CohereUtils; @@ -118,7 +119,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { ); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -199,7 +200,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForInt8ResponseType() throws I ); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -257,7 +258,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException 
= expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -278,7 +279,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -302,7 +303,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -316,7 +317,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -333,7 +334,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index 6334c669d0c1f..5351d1db833e3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; @@ -90,7 +91,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -157,7 +158,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx var action = 
actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -205,7 +206,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForEmbeddingsAction() throws I var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -263,7 +264,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -318,7 +319,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -376,7 +377,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("123456"), listener); + action.execute(new DocumentsOnlyInput(List.of("123456")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java index 7b332e8c6634d..0faee28d1af7d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -58,7 +59,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { var action = createAction(URL, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -79,7 +80,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + 
action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -96,7 +97,7 @@ public void testExecute_ThrowsException() { var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 9b14cf259522c..a1cc0321cb74e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; @@ -103,7 +104,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -156,7 +157,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -208,7 +209,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOExce var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -267,7 +268,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI embeddings request to [%s]", getUrl(webServer)))); @@ -327,7 +328,7 @@ public void testCreate_OpenAiChatCompletionModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new 
DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -390,7 +391,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOExceptio var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -452,7 +453,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IO var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -520,7 +521,7 @@ public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat( var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -597,7 +598,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -680,7 +681,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -748,7 +749,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("super long input"), listener); + action.execute(new DocumentsOnlyInput(List.of("super long input")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index 15998469d08d0..ff8cdeab61ad3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import 
org.elasticsearch.xpack.inference.external.http.sender.Sender; @@ -112,7 +113,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -152,7 +153,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -173,7 +174,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -194,7 +195,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -208,7 +209,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -222,7 +223,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -266,7 +267,7 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc", "def"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc", "def")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java index c803121e6da79..6c83eaf96201a 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -104,7 +105,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -140,7 +141,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -161,7 +162,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -182,7 +183,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -196,7 +197,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -210,7 +211,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java index b4e770141939b..9a85b00c01485 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java @@ -24,7 +24,7 @@ public class ExecutableRequestCreatorTests { public static ExecutableRequestCreator createMock() { var mockCreator = mock(ExecutableRequestCreator.class); - when(mockCreator.create(anyList(), any(), any(), any(), any())).thenReturn(() -> {}); + when(mockCreator.create(any(), anyList(), any(), any(), any(), any())).thenReturn(() -> {}); return mockCreator; } @@ -38,7 +38,7 @@ public static ExecutableRequestCreator createMock(RequestSender requestSender, S doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; return (Runnable) () -> requestSender.send( mock(Logger.class), RequestTests.mockRequest(modelId), @@ -47,7 +47,7 @@ public static ExecutableRequestCreator createMock(RequestSender requestSender, S mock(ResponseHandler.class), listener ); - }).when(mockCreator).create(anyList(), any(), any(), any(), any()); + }).when(mockCreator).create(any(), anyList(), any(), any(), any(), any()); return mockCreator; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index 79b17f8dff29d..829a6f981db4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -107,7 +107,7 @@ public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception PlainActionFuture listener = new PlainActionFuture<>(); sender.send( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator(getUrl(webServer), null, "key", "model", null), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), listener ); @@ -138,7 +138,7 @@ public void testHttpRequestSender_Throws_WhenCallingSendBeforeStart() throws Exc PlainActionFuture listener = new PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, - () -> sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), listener) + () -> sender.send(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), listener) ); assertThat(thrownException.getMessage(), is("call start() before sending a request")); } @@ -162,7 +162,12 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + sender.send( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -187,7 +192,12 @@ public void 
testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + sender.send( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index 5e88c3f1bb8f5..24a261dfe47c4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -70,7 +70,7 @@ public void testQueueSize_IsEmpty() { public void testQueueSize_IsOne() { var service = createRequestExecutorServiceWithMocks(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, new PlainActionFuture<>()); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); } @@ -110,7 +110,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { PlainActionFuture listener = new PlainActionFuture<>(); service.execute( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), - List.of(), + new DocumentsOnlyInput(List.of()), null, listener ); @@ -133,7 +133,7 @@ public void testSend_AfterShutdown_Throws() { service.shutdown(); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -149,13 +149,13 @@ public void testSend_Throws_WhenQueueIsFull() { "test_service", threadPool, null, - RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettings(1), + createRequestExecutorServiceSettings(1), new SingleRequestManager(mock(RetryingHttpSender.class)) ); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, new PlainActionFuture<>()); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -180,7 +180,7 @@ public void testTaskThrowsError_CallsOnFailure() { service.execute( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), - List.of(), + new DocumentsOnlyInput(List.of()), null, listener ); @@ -207,7 +207,12 @@ public void testSend_CallsOnFailure_WhenRequestTimesOut() { var service = createRequestExecutorServiceWithMocks(); var listener = new 
PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + service.execute( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -265,7 +270,7 @@ public void onFailure(Exception e) { } }; - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); @@ -279,7 +284,7 @@ public void testSend_NotifiesTasksOfShutdown() { var service = createRequestExecutorServiceWithMocks(); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); service.shutdown(); service.start(); @@ -387,11 +392,16 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, var settings = createRequestExecutorServiceSettings(1); var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -426,11 +436,21 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( var settings = createRequestExecutorServiceSettings(3); var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); assertThat(service.queueSize(), is(3)); settings.setQueueCapacity(1); @@ -471,11 +491,16 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO var settings = createRequestExecutorServiceSettings(1); var 
service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index 5c35d8ce49b60..14a7e28eb84db 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -60,7 +60,7 @@ public void testExecuting_DoesNotCallOnFailureForTimeout_AfterIllegalArgumentExc var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, listener @@ -80,7 +80,7 @@ public void testRequest_ReturnsTimeoutException() { PlainActionFuture listener = new PlainActionFuture<>(); var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -106,7 +106,7 @@ public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exceptio var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -138,7 +138,7 @@ public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -168,7 +168,7 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnResp var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, listener diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java new file mode 100644 index 0000000000000..441c0f15e8224 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.cohere; + +import org.apache.http.HttpResponse; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class CohereRankedResponseEntityTests extends ESTestCase { + + public void testResponseLiteral() throws IOException { + InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + List expected = responseLiteralDocs(); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + public void testGeneratedResponse() throws IOException { + int numDocs = randomIntBetween(1, 10); + + List expected = new ArrayList<>(numDocs); + StringBuilder responseBuilder = new StringBuilder(); + + responseBuilder.append("{"); + responseBuilder.append("\"index\":\"").append(randomAlphaOfLength(36)).append("\","); + responseBuilder.append("\"results\": ["); + List indices = linear(numDocs); + List scores = linearDoubles(numDocs); + for (int i = 0; i < numDocs; i++) { + int index = indices.remove(randomInt(indices.size() - 1)); + + responseBuilder.append("{"); + responseBuilder.append("\"index\":").append(index).append(","); + responseBuilder.append("\"relevance_score\":").append(scores.get(i).toString()).append("}"); + expected.add(new RankedDocsResults.RankedDoc(String.valueOf(index), scores.get(i).toString(), null)); + if (i < numDocs - 1) { + responseBuilder.append(","); + } + } + responseBuilder.append("],"); + responseBuilder.append("\"meta\": {"); + responseBuilder.append("\"api_version\": {"); + responseBuilder.append("\"version\": \"1\"},"); + responseBuilder.append("\"billed_units\": {"); + responseBuilder.append("\"search_units\":").append(randomIntBetween(1, 10)).append("}}}"); + + InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8)) + ); + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) { + assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index()); + } + } + + private ArrayList responseLiteralDocs() { + var list = new ArrayList(); + + list.add(new RankedDocsResults.RankedDoc("2", "0.98005307", null)); + 
list.add(new RankedDocsResults.RankedDoc("3", "0.27904198", null)); + list.add(new RankedDocsResults.RankedDoc("0", "0.10194652", null)); + return list; + + }; + + private final String responseLiteral = """ + { + "index": "d0760819-5a73-4d58-b163-3956d3648b62", + "results": [ + { + "index": 2, + "relevance_score": 0.98005307 + }, + { + "index": 3, + "relevance_score": 0.27904198 + }, + { + "index": 0, + "relevance_score": 0.10194652 + } + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "search_units": 1 + } + } + } + """; + + public void testResponseLiteralWithDocuments() throws IOException { + InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText)); + } + + private final String responseLiteralWithDocuments = """ + { + "index": "44873262-1315-4c06-8433-fdc90c9790d0", + "results": [ + { + "document": { + "text": "Washington, D.C.." + }, + "index": 2, + "relevance_score": 0.98005307 + }, + { + "document": { + "text": "Capital punishment has existed in the United States since beforethe United States was a country. " + }, + "index": 3, + "relevance_score": 0.27904198 + }, + { + "document": { + "text": "Carson City is the capital city of the American state of Nevada." + }, + "index": 0, + "relevance_score": 0.10194652 + } + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "search_units": 1 + } + } + } + """; + + private final List responseLiteralDocsWithText = List.of( + new RankedDocsResults.RankedDoc("2", "0.98005307", "Washington, D.C.."), + new RankedDocsResults.RankedDoc( + "3", + "0.27904198", + "Capital punishment has existed in the United States since beforethe United States was a country. 
" + ), + new RankedDocsResults.RankedDoc("0", "0.10194652", "Carson City is the capital city of the American state of Nevada.") + ); + + private ArrayList linear(int n) { + ArrayList list = new ArrayList<>(); + for (int i = 0; i <= n; i++) { + list.add(i); + } + return list; + } + + // creates a list of doubles of monotonically decreasing magnitude + private ArrayList linearDoubles(int n) { + ArrayList list = new ArrayList<>(); + double startValue = 1.0; + double decrement = startValue / n + 1; + for (int i = 0; i <= n; i++) { + list.add(startValue - i * decrement); + } + return list; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index 5c438644a18c5..33b8e10963c4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -113,9 +114,22 @@ protected void doInfer( } + @Override + protected void doInfer( + Model model, + @Nullable String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + + } + @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 1491511b84bb3..ebfa8be65c6df 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -336,11 +336,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(new TextEmbeddingResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -359,11 +359,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmp doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(new TextEmbeddingByteResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -384,11 +384,11 @@ 
public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -408,11 +408,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 97de300615b9b..fb45bb813fb3d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -620,7 +620,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException try (var service = new CohereService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat( @@ -679,7 +679,7 @@ public void testInfer_SendsRequest() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -914,7 +914,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); @@ -965,7 +965,7 @@ public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsA null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -1031,6 +1031,7 @@ public void 
testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIs PlainActionFuture listener = new PlainActionFuture<>(); service.infer( model, + null, List.of("abc"), CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, null), InputType.INGEST, @@ -1098,7 +1099,7 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.UNSPECIFIED, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.UNSPECIFIED, listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 80ceb855f9e94..87121ffa87c74 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -405,6 +405,7 @@ public void testChunkInfer() { service.chunkedInfer( model, + null, List.of("foo", "bar"), Map.of(), InputType.SEARCH, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index dbb50260edaf1..b35e75fd6786d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -384,6 +384,7 @@ public void testChunkInfer() { service.chunkedInfer( model, + null, List.of("foo", "bar"), Map.of(), InputType.SEARCH, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index cd896cb18440a..b88ecc66c77b3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -64,7 +64,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep try (var service = new TestService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index b47956ff00c81..d72888ee9cd4b 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -435,7 +435,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -470,7 +470,7 @@ public void testInfer_SendsElserRequest() throws IOException { var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index a1718488be5d0..5ebb5ca274816 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -681,7 +681,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException try (var service = new OpenAiService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -727,7 +727,7 @@ public void testInfer_SendsRequest() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -1152,7 +1152,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index 710ffe14b31e2..c657d80bccbcb 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -119,7 +119,14 @@ private void doInferenceServiceModel(CoordinatedInferenceAction.Request request, client, INFERENCE_ORIGIN, InferenceAction.INSTANCE, - new InferenceAction.Request(TaskType.ANY, request.getModelId(), request.getInputs(), request.getTaskSettings(), inputType), + new InferenceAction.Request( + TaskType.ANY, + request.getModelId(), + null, + request.getInputs(), + request.getTaskSettings(), + inputType + ), listener.delegateFailureAndWrap((l, r) -> l.onResponse(translateInferenceServiceResponse(r.getResults()))) ); } From 147f5a00a41d55dd2331ee896d85adec0665e54a Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 3 Apr 2024 21:23:51 +0200 Subject: [PATCH 102/264] ESQL: Introduce language versioning to REST API (#106824) For the _query endpoint, add a parameter for the ESQL language version to the JSON payload. For now, it is optional; if present it is only validated, and no further action is taken based on it. --- docs/changelog/106824.yaml | 5 + .../core/esql/action/EsqlQueryRequest.java | 3 + .../esql/action/EsqlQueryRequestBuilder.java | 2 + .../xpack/esql/action/EsqlQueryRequest.java | 51 ++++++- .../esql/action/EsqlQueryRequestBuilder.java | 6 + .../xpack/esql/action/RequestXContent.java | 6 +- .../xpack/esql/version/EsqlVersion.java | 111 +++++++++++++++ .../esql/action/EsqlQueryRequestTests.java | 130 +++++++++++++++++- .../xpack/esql/version/EsqlVersionTests.java | 81 +++++++++++ 9 files changed, 384 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/106824.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java diff --git a/docs/changelog/106824.yaml b/docs/changelog/106824.yaml new file mode 100644 index 0000000000000..0a2001df5039a --- /dev/null +++ b/docs/changelog/106824.yaml @@ -0,0 +1,5 @@ +pr: 106824 +summary: "ESQL: Introduce language versioning to REST API" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java index 9faa78d3b34f9..dcd89c200db26 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java @@ -21,6 +21,9 @@ protected EsqlQueryRequest(StreamInput in) throws IOException { super(in); } + // Use the unparsed version String, so we don't have to serialize a version object. 
+ public abstract String esqlVersion(); + public abstract String query(); public abstract QueryBuilder filter(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index a0a2bbc3bed19..acd44165cad65 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -35,6 +35,8 @@ public final ActionType action() { return action; } + public abstract EsqlQueryRequestBuilder esqlVersion(String esqlVersion); + public abstract EsqlQueryRequestBuilder query(String query); public abstract EsqlQueryRequestBuilder filter(QueryBuilder filter); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index e5ff790619d14..54ae2f4c90fc1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -20,6 +20,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.version.EsqlVersion; import java.io.IOException; import java.util.List; @@ -35,6 +36,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private boolean async; + private String esqlVersion; private String query; private boolean columnar; private boolean profile; @@ -45,6 +47,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private TimeValue waitForCompletionTimeout = DEFAULT_WAIT_FOR_COMPLETION; private TimeValue keepAlive = DEFAULT_KEEP_ALIVE; private boolean keepOnCompletion; + private boolean onSnapshotBuild = Build.current().isSnapshot(); static EsqlQueryRequest syncEsqlQueryRequest() { return new EsqlQueryRequest(false); @@ -65,17 +68,54 @@ public EsqlQueryRequest(StreamInput in) throws IOException { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; + if (Strings.hasText(esqlVersion) == false) { + // TODO: make this required + // "https://github.com/elastic/elasticsearch/issues/104890" + // validationException = addValidationError(invalidVersion("is required"), validationException); + } else { + EsqlVersion version = EsqlVersion.parse(esqlVersion); + if (version == null) { + validationException = addValidationError(invalidVersion("has invalid value [" + esqlVersion + "]"), validationException); + } else if (version == EsqlVersion.SNAPSHOT && onSnapshotBuild == false) { + validationException = addValidationError( + invalidVersion("with value [" + esqlVersion + "] only allowed in snapshot builds"), + validationException + ); + } + } if (Strings.hasText(query) == false) { - validationException = addValidationError("[query] is required", validationException); + validationException = addValidationError("[" + RequestXContent.QUERY_FIELD + "] is required", validationException); } - if (Build.current().isSnapshot() == false && pragmas.isEmpty() == false) { - validationException = addValidationError("[pragma] only allowed in snapshot builds", validationException); + 
if (onSnapshotBuild == false && pragmas.isEmpty() == false) { + validationException = addValidationError( + "[" + RequestXContent.PRAGMA_FIELD + "] only allowed in snapshot builds", + validationException + ); } return validationException; } + private static String invalidVersion(String reason) { + return "[" + + RequestXContent.ESQL_VERSION_FIELD + + "] " + + reason + + ", latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]"; + } + public EsqlQueryRequest() {} + public void esqlVersion(String esqlVersion) { + this.esqlVersion = esqlVersion; + } + + @Override + public String esqlVersion() { + return esqlVersion; + } + public void query(String query) { this.query = query; } @@ -174,4 +214,9 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, // Pass the query as the description return new CancellableTask(id, type, action, query, parentTaskId, headers); } + + // Setter for tests + void onSnapshotBuild(boolean onSnapshotBuild) { + this.onSnapshotBuild = onSnapshotBuild; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java index 7df5c95cbc953..511fbd9f1c275 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java @@ -29,6 +29,12 @@ private EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryRequest req super(client, EsqlQueryAction.INSTANCE, request); } + @Override + public EsqlQueryRequestBuilder esqlVersion(String esqlVersion) { + request.esqlVersion(esqlVersion); + return this; + } + @Override public EsqlQueryRequestBuilder query(String query) { request.query(query); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 8db940d5a4779..ef82f666ce904 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -46,10 +46,11 @@ final class RequestXContent { PARAM_PARSER.declareString(constructorArg(), TYPE); } - private static final ParseField QUERY_FIELD = new ParseField("query"); + static final ParseField ESQL_VERSION_FIELD = new ParseField("version"); + static final ParseField QUERY_FIELD = new ParseField("query"); private static final ParseField COLUMNAR_FIELD = new ParseField("columnar"); private static final ParseField FILTER_FIELD = new ParseField("filter"); - private static final ParseField PRAGMA_FIELD = new ParseField("pragma"); + static final ParseField PRAGMA_FIELD = new ParseField("pragma"); private static final ParseField PARAMS_FIELD = new ParseField("params"); private static final ParseField LOCALE_FIELD = new ParseField("locale"); private static final ParseField PROFILE_FIELD = new ParseField("profile"); @@ -72,6 +73,7 @@ static EsqlQueryRequest parseAsync(XContentParser parser) { } private static void objectParserCommon(ObjectParser parser) { + parser.declareString(EsqlQueryRequest::esqlVersion, ESQL_VERSION_FIELD); parser.declareString(EsqlQueryRequest::query, QUERY_FIELD); parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, 
c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java new file mode 100644 index 0000000000000..9f96ba0e64e17 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.version; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.VersionId; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.Map; + +public enum EsqlVersion implements VersionId { + /** + * Breaking changes go here until the next version is released. + */ + SNAPSHOT(Integer.MAX_VALUE, 12, 99, "📷"), + ROCKET(2024, 4, "🚀"); + + static final Map VERSION_MAP_WITH_AND_WITHOUT_EMOJI = versionMapWithAndWithoutEmoji(); + + private static Map versionMapWithAndWithoutEmoji() { + Map stringToVersion = new LinkedHashMap<>(EsqlVersion.values().length * 2); + + for (EsqlVersion version : EsqlVersion.values()) { + putVersionCheckNoDups(stringToVersion, version.versionStringWithoutEmoji(), version); + putVersionCheckNoDups(stringToVersion, version.toString(), version); + } + + return stringToVersion; + } + + private static void putVersionCheckNoDups(Map stringToVersion, String versionString, EsqlVersion version) { + EsqlVersion existingVersionForKey = stringToVersion.put(versionString, version); + if (existingVersionForKey != null) { + throw new IllegalArgumentException("Duplicate esql version with version string [" + versionString + "]"); + } + } + + /** + * Accepts a version string with the emoji suffix or without it. + * E.g. both "2024.04.01.🚀" and "2024.04.01" will be interpreted as {@link EsqlVersion#ROCKET}. 
+ */ + public static EsqlVersion parse(String versionString) { + return VERSION_MAP_WITH_AND_WITHOUT_EMOJI.get(versionString); + } + + public static EsqlVersion latestReleased() { + return Arrays.stream(EsqlVersion.values()).filter(v -> v != SNAPSHOT).max(Comparator.comparingInt(EsqlVersion::id)).get(); + } + + private int year; + private byte month; + private byte revision; + private String emoji; + + EsqlVersion(int year, int month, String emoji) { + this(year, month, 1, emoji); + } + + EsqlVersion(int year, int month, int revision, String emoji) { + if ((1 <= revision && revision <= 99) == false) { + throw new IllegalArgumentException("Version revision number must be between 1 and 99 but was [" + revision + "]"); + } + if ((1 <= month && month <= 12) == false) { + throw new IllegalArgumentException("Version month must be between 1 and 12 but was [" + month + "]"); + } + if ((emoji.codePointCount(0, emoji.length()) == 1) == false) { + throw new IllegalArgumentException("Version emoji must be a single unicode character but was [" + emoji + "]"); + } + this.year = year; + this.month = (byte) month; + this.revision = (byte) revision; + this.emoji = emoji; + } + + public int year() { + return year; + } + + public byte month() { + return month; + } + + public byte revision() { + return revision; + } + + public String emoji() { + return emoji; + } + + public String versionStringWithoutEmoji() { + return this == SNAPSHOT ? "snapshot" : Strings.format("%d.%02d.%02d", year, month, revision); + } + + @Override + public String toString() { + return versionStringWithoutEmoji() + "." + emoji; + } + + @Override + public int id() { + return this == SNAPSHOT ? Integer.MAX_VALUE : (10000 * year + 100 * month + revision); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 5b16691bcee77..44066ff3d091d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -24,6 +24,8 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.parser.TypedParamValue; +import org.elasticsearch.xpack.esql.version.EsqlVersion; +import org.elasticsearch.xpack.esql.version.EsqlVersionTests; import java.io.IOException; import java.util.ArrayList; @@ -44,20 +46,23 @@ public void testParseFields() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); + EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; StringBuilder paramsString = paramsString(params, hasParams); String json = String.format(Locale.ROOT, """ { + "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", "filter": %s - %s""", query, columnar, locale.toLanguageTag(), filter, paramsString); + %s""", esqlVersion, query, columnar, locale.toLanguageTag(), filter, paramsString); EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -75,6 +80,7 @@ public void 
testParseFieldsForAsync() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); + EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; @@ -86,6 +92,7 @@ public void testParseFieldsForAsync() throws IOException { Locale.ROOT, """ { + "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", @@ -94,6 +101,7 @@ public void testParseFieldsForAsync() throws IOException { "wait_for_completion_timeout": "%s", "keep_alive": "%s" %s""", + esqlVersion, query, columnar, locale.toLanguageTag(), @@ -106,6 +114,7 @@ public void testParseFieldsForAsync() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequestAsync(json); + assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -149,18 +158,123 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testMissingQueryIsNotValidation() throws IOException { + public void testKnownVersionIsValid() throws IOException { + for (EsqlVersion version : EsqlVersion.values()) { + String validVersionString = randomBoolean() ? version.versionStringWithoutEmoji() : version.toString(); + + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, validVersionString); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNull(request.validate()); + + request = parseEsqlQueryRequestAsync(json); + assertNull(request.validate()); + } + } + + public void testUnknownVersionIsNotValid() throws IOException { + String invalidVersionString = EsqlVersionTests.randomInvalidVersionString(); + + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, invalidVersionString); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] has invalid value [" + + invalidVersionString + + "], latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]" + ) + ); + } + + public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { + String esqlVersion = randomBoolean() ? "snapshot" : "snapshot.📷"; + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, esqlVersion); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + request.onSnapshotBuild(true); + assertNull(request.validate()); + + request.onSnapshotBuild(false); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] with value [" + + esqlVersion + + "] only allowed in snapshot builds, latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]" + ) + ); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") + public void testMissingVersionIsNotValid() throws IOException { + String missingVersion = randomBoolean() ? 
"" : ", \"version\": \"\""; + String json = String.format(Locale.ROOT, """ + { + "columnar": true, + "query": "row x = 1" + %s + }""", missingVersion); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] is required, latest available version is [" + EsqlVersion.latestReleased().versionStringWithoutEmoji() + "]" + ) + ); + } + + public void testMissingQueryIsNotValid() throws IOException { String json = """ { - "columnar": true + "columnar": true, + "version": "snapshot" }"""; - EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); assertNotNull(request.validate()); assertThat(request.validate().getMessage(), containsString("[query] is required")); + } + + public void testPragmasOnlyValidOnSnapshot() throws IOException { + String json = """ + { + "version": "2024.04.01", + "query": "ROW x = 1", + "pragma": {"foo": "bar"} + } + """; + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + request.onSnapshotBuild(true); + assertNull(request.validate()); - request = parseEsqlQueryRequestAsync(json); + request.onSnapshotBuild(false); assertNotNull(request.validate()); - assertThat(request.validate().getMessage(), containsString("[query] is required")); + assertThat(request.validate().getMessage(), containsString("[pragma] only allowed in snapshot builds")); } public void testTask() throws IOException { @@ -260,6 +374,10 @@ private static void assertParserErrorMessage(String json, String message) { assertThat(e.getMessage(), containsString(message)); } + static EsqlQueryRequest parseEsqlQueryRequest(String json, boolean sync) throws IOException { + return sync ? parseEsqlQueryRequestSync(json) : parseEsqlQueryRequestAsync(json); + } + static EsqlQueryRequest parseEsqlQueryRequestSync(String json) throws IOException { var request = parseEsqlQueryRequest(json, RequestXContent::parseSync); assertFalse(request.async()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java new file mode 100644 index 0000000000000..cd4fd77a8dd22 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.version; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class EsqlVersionTests extends ESTestCase { + public void testLatestReleased() { + assertThat(EsqlVersion.latestReleased(), is(EsqlVersion.ROCKET)); + } + + public void testVersionString() { + assertThat(EsqlVersion.SNAPSHOT.toString(), equalTo("snapshot.📷")); + assertThat(EsqlVersion.ROCKET.toString(), equalTo("2024.04.01.🚀")); + } + + public void testVersionId() { + assertThat(EsqlVersion.SNAPSHOT.id(), equalTo(Integer.MAX_VALUE)); + assertThat(EsqlVersion.ROCKET.id(), equalTo(20240401)); + + for (EsqlVersion version : EsqlVersion.values()) { + assertTrue(EsqlVersion.SNAPSHOT.onOrAfter(version)); + if (version != EsqlVersion.SNAPSHOT) { + assertTrue(version.before(EsqlVersion.SNAPSHOT)); + } else { + assertTrue(version.onOrAfter(EsqlVersion.SNAPSHOT)); + } + } + + List versionsSortedAsc = Arrays.stream(EsqlVersion.values()) + .sorted(Comparator.comparing(EsqlVersion::year).thenComparing(EsqlVersion::month).thenComparing(EsqlVersion::revision)) + .toList(); + for (int i = 0; i < versionsSortedAsc.size() - 1; i++) { + assertTrue(versionsSortedAsc.get(i).before(versionsSortedAsc.get(i + 1))); + } + } + + public void testVersionStringNoEmoji() { + for (EsqlVersion version : EsqlVersion.values()) { + String[] versionSegments = version.toString().split("\\."); + String[] parsingPrefixSegments = Arrays.copyOf(versionSegments, versionSegments.length - 1); + + String expectedParsingPrefix = String.join(".", parsingPrefixSegments); + assertThat(version.versionStringWithoutEmoji(), equalTo(expectedParsingPrefix)); + } + } + + public void testParsing() { + for (EsqlVersion version : EsqlVersion.values()) { + String versionStringWithoutEmoji = version.versionStringWithoutEmoji(); + + assertThat(EsqlVersion.parse(versionStringWithoutEmoji), is(version)); + assertThat(EsqlVersion.parse(versionStringWithoutEmoji + "." + version.emoji()), is(version)); + } + + assertNull(EsqlVersion.parse(randomInvalidVersionString())); + } + + public static String randomInvalidVersionString() { + String[] invalidVersionString = new String[1]; + + do { + int length = randomIntBetween(1, 10); + invalidVersionString[0] = randomAlphaOfLength(length); + } while (EsqlVersion.VERSION_MAP_WITH_AND_WITHOUT_EMOJI.containsKey(invalidVersionString[0])); + + return invalidVersionString[0]; + } +} From a37debdbea8b98ae8d785eb56a45e8436aa51239 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Wed, 3 Apr 2024 16:40:41 -0500 Subject: [PATCH 103/264] Ensure getUser() is the logical user, not API key creator for RCS 2.0 (#107023) This commit changes SecurityContext#getUser() to provide the original user that initiated the call when run across clusters for RCS 2.0. Before this change, getUser() would provide the RCS 2.0 API key creator as the current user. 
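For illustration, a minimal sketch of the new behavior, adapted from the test added below. It assumes `threadContext` and `securityContext` are set up as in `SecurityContextTests`; the user, realm, and node names are arbitrary test values.

    // Build an RCS 2.0 authentication: a cross-cluster API key whose
    // metadata embeds the original caller ("test", authenticated by an
    // "ldap" realm on the querying cluster).
    final User user = new User("test");
    final CrossClusterAccessSubjectInfo info = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo(
        AuthenticationTestHelper.builder().user(user).realmRef(new RealmRef("ldap", "foo", "node1")).build(false)
    );
    final Authentication authentication = AuthenticationTestHelper.builder()
        .crossClusterAccess(randomAlphaOfLengthBetween(10, 20), info)
        .build(false);
    authentication.writeToContext(threadContext);

    // Previously getUser() returned the API key creator, i.e.
    // authentication.getEffectiveSubject().getUser(); with this change it
    // unwraps the cross-cluster access metadata and returns the original
    // caller instead.
    assert user.equals(securityContext.getUser());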
--- .../xpack/core/security/SecurityContext.java | 6 ++++++ .../xpack/security/SecurityContextTests.java | 15 +++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java index e13102796ae48..05ef5d3f70fd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java @@ -35,6 +35,7 @@ import java.util.function.Consumer; import java.util.function.Function; +import static org.elasticsearch.xpack.core.security.authc.Authentication.getAuthenticationFromCrossClusterAccessMetadata; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.AUTHENTICATION_KEY; import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.AUTHORIZATION_INFO_KEY; @@ -71,6 +72,11 @@ public User requireUser() { @Nullable public User getUser() { Authentication authentication = getAuthentication(); + if (authentication != null) { + if (authentication.isCrossClusterAccess()) { + authentication = getAuthenticationFromCrossClusterAccessMetadata(authentication); + } + } return authentication == null ? null : authentication.getEffectiveSubject().getUser(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java index 07c858f10f447..22488334d85c0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.ParentActionAuthorization; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; @@ -78,6 +79,20 @@ public void testGetAuthenticationAndUser() throws IOException { assertEquals(user, securityContext.getUser()); } + public void testGetUserForAPIKeyBasedCrossCluster() throws IOException { + final User user = new User("test"); + final CrossClusterAccessSubjectInfo crossClusterAccessSubjectInfo = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo( + AuthenticationTestHelper.builder().user(user).realmRef(new RealmRef("ldap", "foo", "node1")).build(false) + ); + final Authentication authentication = AuthenticationTestHelper.builder() + .crossClusterAccess(randomAlphaOfLengthBetween(10, 20), crossClusterAccessSubjectInfo) + .build(false); + User apiKeyUser = authentication.getEffectiveSubject().getUser(); + authentication.writeToContext(threadContext); + assertEquals(user, securityContext.getUser()); + assertNotEquals(apiKeyUser, securityContext.getUser()); + } + public void testGetAuthenticationDoesNotSwallowIOException() { threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, ""); // an intentionally corrupt header final SecurityContext 
securityContext = new SecurityContext(Settings.EMPTY, threadContext); From 72a824819b7d8e8982904d75f10f8f0ab054843a Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 4 Apr 2024 07:05:18 +0200 Subject: [PATCH 104/264] [Profiling] Use default task cancellation check (#107037) With this commit we remove our custom check of whether a task has been cancelled and instead use the standard implementation that is already provided by the task API. --- .../TransportGetStackTracesAction.java | 21 ++----------------- 1 file changed, 2 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index aa5f3efb179a2..8fff0dab53b08 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; @@ -155,22 +154,6 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL } } - /** - * Checks whether a task has been cancelled and notifies the provided listener if required. - * @param task The task to check. May be a cancelable task. - * @param listener Listener to notify. - * @return true iff the task has been cancelled. Callers must terminate as early as possible. - */ - private boolean mayNotifyOfCancellation(Task task, ActionListener listener) { - if (task instanceof CancellableTask && ((CancellableTask) task).isCancelled()) { - log.info("{} got cancelled.", task); - listener.onFailure(new TaskCancelledException("get stacktraces task cancelled")); - return true; - } else { - return false; - } - } - private void searchProfilingEvents( Task submitTask, Client client, @@ -447,7 +430,7 @@ private void retrieveStackTraces( GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener ) { - if (mayNotifyOfCancellation(submitTask, submitListener)) { + if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { return; } List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet()); @@ -670,7 +653,7 @@ private void retrieveStackTraceDetails( List executableIds, ActionListener submitListener ) { - if (mayNotifyOfCancellation(submitTask, submitListener)) { + if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { return; } List stackFrameIndices = resolver.resolve( From 9a2f8a80eb729c33b33a819a6246d03459e168c9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 07:37:13 +0100 Subject: [PATCH 105/264] Add remote cluster network troubleshooting docs (#107072) Spells out in a little more detail our expectations for remote cluster connections, including an example log message when the network is unreliable and some suggestions for how to troubleshoot further.
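A note on the keepalive advice in the docs below: the same tuning can also be expressed at the JDK socket level. The following sketch is illustrative only, not part of this patch; the timeout values are arbitrary examples, and the extended socket options are platform-dependent (available since JDK 11).

import java.io.IOException;
import java.net.Socket;
import java.net.StandardSocketOptions;
import jdk.net.ExtendedSocketOptions;

public class KeepAliveSketch {
    // Illustrative only: enable TCP keepalives and probe an idle connection
    // early so a dead peer is detected and reported quickly instead of after
    // minutes or hours of operating-system defaults.
    static void configureKeepAlive(Socket socket) throws IOException {
        socket.setOption(StandardSocketOptions.SO_KEEPALIVE, true);   // turn keepalives on
        socket.setOption(ExtendedSocketOptions.TCP_KEEPIDLE, 300);    // seconds idle before the first probe
        socket.setOption(ExtendedSocketOptions.TCP_KEEPINTERVAL, 60); // seconds between probes
        socket.setOption(ExtendedSocketOptions.TCP_KEEPCOUNT, 5);     // failed probes before the connection is dropped
    }
}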
--- .../remote-clusters-troubleshooting.asciidoc | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc index f7b08b40bb7ef..df3c54794dc06 100644 --- a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc @@ -77,6 +77,46 @@ org.elasticsearch.transport.ConnectTransportException: [][192.168.0.42:9443] *co server is enabled>> on the remote cluster. * Ensure no firewall is blocking the communication. +[[remote-clusters-unreliable-network]] +===== Remote cluster connection is unreliable + +====== Symptom + +The local cluster can connect to the remote cluster, but the connection does +not work reliably. For example, some cross-cluster requests may succeed while +others report connection errors, time out, or appear to be stuck waiting for +the remote cluster to respond. + +When {es} detects that the remote cluster connection is not working, it will +report the following message in its logs: +[source,txt,subs=+quotes] +---- +[2023-06-28T16:36:47,264][INFO ][o.e.t.ClusterConnectionManager] [local-node] transport connection to [{my-remote#192.168.0.42:9443}{...}] closed by remote +---- +This message will also be logged if the node of the remote cluster to which +{es} is connected is shut down or restarted. + +Note that with some network configurations it could take minutes or hours for +the operating system to detect that a connection has stopped working. Until the +failure is detected and reported to {es}, requests involving the remote cluster +may time out or may appear to be stuck. + +====== Resolution + +* Ensure that the network between the clusters is as reliable as possible. + +* Ensure that the network is configured to permit <>. + +* Ensure that the network is configured to detect faulty connections quickly. + In particular, you must enable and fully support TCP keepalives, and set a + short <>. + +* On Linux systems, execute `ss -tonie` to verify the details of the + configuration of each network connection between the clusters. + +* If the problems persist, capture network packets at both ends of the + connection and analyse the traffic to look for delays and lost messages. + [[remote-clusters-troubleshooting-tls-trust]] ===== TLS trust not established From daa90069305cdf5c837c5cfd404fd88464956ee5 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 08:17:14 +0100 Subject: [PATCH 106/264] Apply snapshot `?after` filter inline (#107003) In `TransportGetSnapshotsAction` today we build a list of all candidate snapshots and then copy them into another list to apply the `?after` filter. With this commit we construct the final filtered list directly. 
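The shape of the change, reduced to its essence: test the `?after` predicate while accumulating rather than copying the collected list through it afterwards, keeping a separate counter so the response can still report the total number of matches. A simplified, generic sketch follows; the names are illustrative and not taken from the patch.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;

// Simplified, generic sketch of the accumulation pattern introduced by this
// patch; names are illustrative. `matches` stands in for the
// name/fromSortValue/slmPolicy predicates and `after` for the pagination cursor.
final class InlineAfterFilterSketch {
    static <T> List<T> collect(Iterable<T> candidates, Predicate<T> matches, Predicate<T> after, AtomicInteger totalCount) {
        List<T> results = new ArrayList<>();
        for (T candidate : candidates) {
            if (matches.test(candidate)) {
                totalCount.incrementAndGet(); // counted for the response even when filtered out below
                if (after.test(candidate)) {
                    results.add(candidate);   // only post-`after` items are ever materialized
                }
            }
        }
        return results;
    }
}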
--- .../get/TransportGetSnapshotsAction.java | 82 ++++++++++++------- 1 file changed, 52 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index cb4942cc0efb8..190c4c565f1b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -179,8 +179,18 @@ private class GetSnapshotsOperation { // results private final Map failuresByRepository = ConcurrentCollections.newConcurrentMap(); private final Queue> allSnapshotInfos = ConcurrentCollections.newQueue(); + + /** + * Accumulates number of snapshots that match the name/fromSortValue/slmPolicy predicates, to be returned in the response. + */ private final AtomicInteger totalCount = new AtomicInteger(); + /** + * Accumulates the number of snapshots that match the name/fromSortValue/slmPolicy/after predicates, for sizing the final result + * list. + */ + private final AtomicInteger resultsCount = new AtomicInteger(); + GetSnapshotsOperation( CancellableTask cancellableTask, ResolvedRepositories resolvedRepositories, @@ -261,7 +271,7 @@ void getMultipleReposSnapshotInfo(ActionListener listener) } }) - .addListener(listener.map(ignored -> buildResponse())); + .addListener(listener.map(ignored -> buildResponse()), executor, threadPool.getThreadContext()); } private boolean skipRepository(String repositoryName) { @@ -306,7 +316,7 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD } if (verbose) { - snapshots(repo, toResolve.stream().map(Snapshot::getSnapshotId).toList(), listener); + loadSnapshotInfos(repo, toResolve.stream().map(Snapshot::getSnapshotId).toList(), listener); } else { assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; @@ -321,10 +331,11 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD } } - private void snapshots(String repositoryName, Collection snapshotIds, ActionListener listener) { + private void loadSnapshotInfos(String repositoryName, Collection snapshotIds, ActionListener listener) { if (cancellableTask.notifyIfCancelled(listener)) { return; } + final AtomicInteger repositoryTotalCount = new AtomicInteger(); final List snapshots = new ArrayList<>(snapshotIds.size()); final Set snapshotIdsToIterate = new HashSet<>(snapshotIds); // first, look at the snapshots in progress @@ -337,7 +348,10 @@ private void snapshots(String repositoryName, Collection snapshotIds if (snapshotIdsToIterate.remove(entry.snapshot().getSnapshotId())) { final SnapshotInfo snapshotInfo = SnapshotInfo.inProgress(entry); if (matchesPredicates(snapshotInfo)) { - snapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + repositoryTotalCount.incrementAndGet(); + if (afterPredicate.test(snapshotInfo)) { + snapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } } } } @@ -372,7 +386,10 @@ private void snapshots(String repositoryName, Collection snapshotIds @Override public void onResponse(SnapshotInfo snapshotInfo) { if (matchesPredicates(snapshotInfo)) { - syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + 
repositoryTotalCount.incrementAndGet(); + if (afterPredicate.test(snapshotInfo)) { + syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } } refListener.onResponse(null); } @@ -398,11 +415,16 @@ public void onFailure(Exception e) { } }) - .addListener(listener.safeMap(v -> { - // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - applyAfterPredicateAndAdd(snapshots); - return null; - }), executor, threadPool.getThreadContext()); + // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here + .andThenAccept(ignored -> addResults(repositoryTotalCount.get(), snapshots)) + + .addListener(listener); + } + + private void addResults(int repositoryTotalCount, List snapshots) { + totalCount.addAndGet(repositoryTotalCount); + resultsCount.addAndGet(snapshots.size()); + allSnapshotInfos.add(snapshots); } private void addSimpleSnapshotInfos( @@ -413,15 +435,19 @@ private void addSimpleSnapshotInfos( ) { if (repositoryData == null) { // only want current snapshots - applyAfterPredicateAndAdd(currentSnapshots); + addResults(currentSnapshots.size(), currentSnapshots.stream().filter(afterPredicate).toList()); return; } // else want non-current snapshots as well, which are found in the repository data - List snapshotInfos = new ArrayList<>(); + List snapshotInfos = new ArrayList<>(currentSnapshots.size() + toResolve.size()); + int repositoryTotalCount = 0; for (SnapshotInfo snapshotInfo : currentSnapshots) { assert snapshotInfo.startTime() == 0L && snapshotInfo.endTime() == 0L && snapshotInfo.totalShards() == 0L : snapshotInfo; if (toResolve.remove(snapshotInfo.snapshot())) { - snapshotInfos.add(snapshotInfo); + repositoryTotalCount += 1; + if (afterPredicate.test(snapshotInfo)) { + snapshotInfos.add(snapshotInfo); + } } } Map> snapshotsToIndices = new HashMap<>(); @@ -435,22 +461,19 @@ private void addSimpleSnapshotInfos( } } for (Snapshot snapshot : toResolve) { - snapshotInfos.add( - new SnapshotInfo( - snapshot, - snapshotsToIndices.getOrDefault(snapshot.getSnapshotId(), Collections.emptyList()), - Collections.emptyList(), - Collections.emptyList(), - repositoryData.getSnapshotState(snapshot.getSnapshotId()) - ) + final var snapshotInfo = new SnapshotInfo( + snapshot, + snapshotsToIndices.getOrDefault(snapshot.getSnapshotId(), Collections.emptyList()), + Collections.emptyList(), + Collections.emptyList(), + repositoryData.getSnapshotState(snapshot.getSnapshotId()) ); + repositoryTotalCount += 1; + if (afterPredicate.test(snapshotInfo)) { + snapshotInfos.add(snapshotInfo); + } } - applyAfterPredicateAndAdd(snapshotInfos); - } - - private void applyAfterPredicateAndAdd(List snapshotInfos) { - allSnapshotInfos.add(snapshotInfos.stream().filter(afterPredicate).toList()); - totalCount.addAndGet(snapshotInfos.size()); + addResults(repositoryTotalCount, snapshotInfos); } private GetSnapshotsResponse buildResponse() { @@ -463,11 +486,10 @@ private GetSnapshotsResponse buildResponse() { .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); final List snapshotInfos; - if (size == GetSnapshotsRequest.NO_LIMIT) { + if (size == GetSnapshotsRequest.NO_LIMIT || resultsCount.get() <= size) { snapshotInfos = resultsStream.toList(); } else { - final var allocateSize = Math.min(size, 1000); // ignore excessively-large sizes in request params - snapshotInfos = new ArrayList<>(allocateSize); + snapshotInfos = new ArrayList<>(size); for (var iterator = 
resultsStream.iterator(); iterator.hasNext();) { final var snapshotInfo = iterator.next(); if (snapshotInfos.size() < size) { From 996a164bd572e07961964112cf7b6ac18d9d92f4 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 4 Apr 2024 09:47:14 +0200 Subject: [PATCH 107/264] Set visibility of failure_store param of Rollover API to feature_flag (#107061) --- .../main/resources/rest-api-spec/api/indices.rollover.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index e3c06ab080597..e04786ec14cf7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -66,7 +66,9 @@ }, "failure_store":{ "type":"boolean", - "description":"If set to true, the rollover action will be applied on the failure store of the data stream." + "description":"If set to true, the rollover action will be applied on the failure store of the data stream.", + "visibility": "feature_flag", + "feature_flag": "es.failure_store_feature_flag_enabled" } }, "body":{ From 7b254218fb048a0e9245f05b4b08cc57ef5755e7 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Thu, 4 Apr 2024 09:48:35 +0200 Subject: [PATCH 108/264] Add ES|QL signum function (#106866) * Add ES|QL signum function * Update docs/changelog/106866.yaml * Skip csv tests for versions older than 8.14 * Reference layout docs file and fix instructions for adding functions * Break csv specs by param type * More tests --- docs/changelog/106866.yaml | 5 + .../functions/description/signum.asciidoc | 5 + .../esql/functions/examples/signum.asciidoc | 13 +++ .../esql/functions/layout/signum.asciidoc | 15 +++ .../esql/functions/math-functions.asciidoc | 2 + .../esql/functions/parameters/signum.asciidoc | 6 + .../esql/functions/signature/signum.svg | 1 + .../esql/functions/types/signum.asciidoc | 12 ++ .../src/main/resources/floats.csv-spec | 47 ++++++++ .../src/main/resources/ints.csv-spec | 67 +++++++++++ .../src/main/resources/math.csv-spec | 13 +++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/unsigned_long.csv-spec | 31 +++++ .../scalar/math/SignumDoubleEvaluator.java | 108 +++++++++++++++++ .../scalar/math/SignumIntEvaluator.java | 110 ++++++++++++++++++ .../scalar/math/SignumLongEvaluator.java | 110 ++++++++++++++++++ .../math/SignumUnsignedLongEvaluator.java | 110 ++++++++++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../function/scalar/math/Signum.java | 102 ++++++++++++++++ .../function/scalar/package-info.java | 21 +++- .../xpack/esql/io/stream/PlanNamedTypes.java | 3 + .../function/scalar/math/SignumTests.java | 82 +++++++++++++ 22 files changed, 864 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/106866.yaml create mode 100644 docs/reference/esql/functions/description/signum.asciidoc create mode 100644 docs/reference/esql/functions/examples/signum.asciidoc create mode 100644 docs/reference/esql/functions/layout/signum.asciidoc create mode 100644 docs/reference/esql/functions/parameters/signum.asciidoc create mode 100644 docs/reference/esql/functions/signature/signum.svg create mode 100644 docs/reference/esql/functions/types/signum.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java create mode 
100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java diff --git a/docs/changelog/106866.yaml b/docs/changelog/106866.yaml new file mode 100644 index 0000000000000..ffc34e5962850 --- /dev/null +++ b/docs/changelog/106866.yaml @@ -0,0 +1,5 @@ +pr: 106866 +summary: Add ES|QL signum function +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/description/signum.asciidoc b/docs/reference/esql/functions/description/signum.asciidoc new file mode 100644 index 0000000000000..db44c019e247e --- /dev/null +++ b/docs/reference/esql/functions/description/signum.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. diff --git a/docs/reference/esql/functions/examples/signum.asciidoc b/docs/reference/esql/functions/examples/signum.asciidoc new file mode 100644 index 0000000000000..190c1d0f71136 --- /dev/null +++ b/docs/reference/esql/functions/examples/signum.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=signum] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=signum-result] +|=== + diff --git a/docs/reference/esql/functions/layout/signum.asciidoc b/docs/reference/esql/functions/layout/signum.asciidoc new file mode 100644 index 0000000000000..f5b565993f392 --- /dev/null +++ b/docs/reference/esql/functions/layout/signum.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-signum]] +=== `SIGNUM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/signum.svg[Embedded,opts=inline] + +include::../parameters/signum.asciidoc[] +include::../description/signum.asciidoc[] +include::../types/signum.asciidoc[] +include::../examples/signum.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 8748b35443e8e..dd5b8a0a3d4e0 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -23,6 +23,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -46,6 +47,7 @@ include::layout/log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] include::round.asciidoc[] +include::layout/signum.asciidoc[] include::layout/sin.asciidoc[] include::layout/sinh.asciidoc[] include::sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/signum.asciidoc b/docs/reference/esql/functions/parameters/signum.asciidoc new file mode 100644 index 0000000000000..65013f4c21265 --- /dev/null +++ b/docs/reference/esql/functions/parameters/signum.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/signature/signum.svg b/docs/reference/esql/functions/signature/signum.svg new file mode 100644 index 0000000000000..76d2972f18f42 --- /dev/null +++ b/docs/reference/esql/functions/signature/signum.svg @@ -0,0 +1 @@ +SIGNUM(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/signum.asciidoc b/docs/reference/esql/functions/types/signum.asciidoc new file mode 100644 index 0000000000000..7cda278abdb56 --- /dev/null +++ b/docs/reference/esql/functions/types/signum.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 9c343083275cd..0882fec5ec0bf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -544,3 +544,50 @@ required_feature: esql.agg_values [1.56, 1.78] | Tech Lead [1.7, 1.83, 2.05] | null ; + +signumOfPositiveDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(100) | eval s = signum(d); + +d:double | s:double +100 | 1.0 +; + +signumOfNegativeDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(-100) | eval s = signum(d); + +d:double | s:double +-100 | -1.0 +; + +signumOfZeroDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(0) | eval s = signum(d); + +d:double | s:double +0 | 0.0 +; + +signumWithEvalWhereAndStats#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from employees +| where emp_no <= 10009 +| eval s = signum(mv_min(salary_change)) +| where signum(mv_max(salary_change)) >= 0 +| STATS x = AVG(signum(60000 - salary)); + +x:double +0.14285714285714285 +; + +signumWithEvalAndSort#[skip:-8.13.99,reason:new scalar function added in 8.14] +from employees +| eval s = signum(mv_min(salary_change)) +| where signum(mv_max(salary_change)) >= 0 +| keep s, emp_no, salary, salary_change +| sort s, emp_no +| limit 3; + +s:double | emp_no:integer | salary:integer | salary_change:double +-1.0 | 10002 | 56371 | [-7.23, 11.17] +-1.0 | 10004 | 36174 | [-0.35, 1.13, 3.65, 13.48] +-1.0 | 10005 | 63528 | [-2.14, 13.07] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 8657602e7b16f..3e1d1b19a7f67 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -988,3 +988,70 @@ required_feature: esql.agg_values [3, 5] | Tech Lead [1, 4] | null ; + +signumOfPositiveInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = 100 | eval s = signum(i); + +i:integer | s:double +100 | 1.0 +; + +signumOfNegativeInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = -100 | eval s = signum(i); + +i:integer | s:double +-100 | -1.0 +; + +signumOfZeroInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = 0 | eval s = signum(i); + +i:integer | s:double +0 | 0.0 +; + +signumOfPositiveLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(100) | eval s = signum(l); + +l:long | s:double +100 | 1.0 +; + +signumOfNegativeLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(-100) | eval s = signum(l); + +l:long | s:double +-100 | -1.0 +; + +signumOfZeroLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(0) | eval s = signum(l); + +l:long | s:double +0 | 0.0 +; + +signumWithEvalWhereAndStats#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from employees +| eval s = signum(mv_min(salary_change.int)) +| where signum(mv_max(salary_change.int)) >= 0 +| STATS x=AVG(signum(60000 - salary)); + +x:double +0.5409836065573771 +; + 
+signumWithEvalAndSort#[skip:-8.13.99,reason:new scalar function added in 8.14] +from employees +| eval s = signum(60000 - salary) +| where signum(salary - 55000) >= 0 +| keep s, emp_no, salary +| sort s DESC, salary ASC +| limit 3; + +s:double | emp_no:integer | salary:integer +1.0 | 10052 | 55360 +1.0 | 10002 | 56371 +1.0 | 10041 | 56415 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index 8491919b3ee93..6caeade1af58c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1249,6 +1249,19 @@ i:ul | c:ul | f:ul 1000000000000000000 | 1000000000000000000 | 1000000000000000000 ; +signum#[skip:-8.13.99,reason:new scalar function added in 8.14] +// tag::signum[] +ROW d = 100.0 +| EVAL s = SIGNUM(d) +// end::signum[] +; + +// tag::signum-result[] +d: double | s:double +100 | 1.0 +// end::signum-result[] +; + sqrt // tag::sqrt[] ROW d = 100.0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 524de7c2c3b67..746684aca3e38 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -58,6 +58,7 @@ double pi() "keyword right(string:keyword|text, length:integer)" "double round(number:double, ?decimals:integer)" "keyword|text rtrim(string:keyword|text)" +"double signum(number:double|integer|long|unsigned_long)" "double sin(angle:double|integer|long|unsigned_long)" "double sinh(angle:double|integer|long|unsigned_long)" "keyword split(string:keyword|text, delim:keyword|text)" @@ -165,6 +166,7 @@ replace |[string, regex, newString] |["keyword|text", "keyword|te right |[string, length] |["keyword|text", integer] |[, ] round |[number, decimals] |[double, integer] |[The numeric value to round, The number of decimal places to round to. Defaults to 0.] rtrim |string |"keyword|text" |[""] +signum |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." sin |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. sinh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. split |[string, delim] |["keyword|text", "keyword|text"] |[, ] @@ -273,6 +275,7 @@ replace |The function substitutes in the string any match of the regular e right |Return the substring that extracts length chars from the string starting from the right. round |Rounds a number to the closest number with the specified number of digits. rtrim |Removes trailing whitespaces from a string. +signum |Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. split |Split a single valued string into multiple strings. 
@@ -382,6 +385,7 @@ replace |keyword right |keyword |[false, false] |false |false round |double |[false, true] |false |false rtrim |"keyword|text" |false |false |false +signum |double |false |false |false sin |double |false |false |false sinh |double |false |false |false split |keyword |[false, false] |false |false @@ -443,5 +447,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -99 | 99 | 99 +100 | 100 | 100 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index c6f24d876240f..f1a15f41af7b3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -187,3 +187,34 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc bytes_in:ul | rad:double 16002960716282089759 | 2.79304354566432608E17 ; + +signumOfPositiveUnsignedLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_ul(100) | eval s = signum(l); + +l:ul | s:double +100 | 1.0 +; + +signumOfZeroUnsignedLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_ul(0) | eval s = signum(l); + +l:ul | s:double +0 | 0.0 +; + +signumWithEvalAndWhere#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from ul_logs | +where signum(bytes_in) >= 0.0 | +eval s = signum(bytes_out) | +keep s, bytes_in, bytes_out | +sort bytes_out, s | +limit 2; + +warning:Line 2:7: evaluation of [signum(bytes_in)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:7: java.lang.IllegalArgumentException: single-value function encountered multi-value + +s:double | bytes_in:ul | bytes_out:ul +1.0 | 1957665857956635540 | 352442273299370793 +1.0 | 2408213296071189837 | 419872666232023984 +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java new file mode 100644 index 0000000000000..c7d21a7b9c5a0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java @@ -0,0 +1,108 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. 
+ */ +public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock valBlock = (DoubleBlock) val.eval(page)) { + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getDouble(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumDoubleEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumDoubleEvaluator get(DriverContext context) { + return new SignumDoubleEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumDoubleEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java new file mode 100644 index 0000000000000..939807d8deffa --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. + */ +public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock valBlock = (IntBlock) val.eval(page)) { + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, IntBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getInt(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, IntVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumIntEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumIntEvaluator get(DriverContext context) { + return new SignumIntEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumIntEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java new file mode 100644 index 0000000000000..0c4af4671672a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. + */ +public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumLongEvaluator get(DriverContext context) { + return new SignumLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java new file mode 100644 index 0000000000000..d3b20c98139c4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. 
+ */ +public final class SignumUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.processUnsignedLong(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.processUnsignedLong(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumUnsignedLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumUnsignedLongEvaluator get(DriverContext context) { + return new SignumUnsignedLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumUnsignedLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 9f0976e0045d3..1a27c7b69c1e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; @@ -152,6 +153,7 @@ private FunctionDefinition[][] functions() { def(Pi.class, Pi::new, "pi"), def(Pow.class, Pow::new, "pow"), def(Round.class, Round::new, "round"), + def(Signum.class, Signum::new, "signum"), def(Sin.class, Sin::new, "sin"), def(Sinh.class, Sinh::new, "sinh"), def(Sqrt.class, Sqrt::new, "sqrt"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java new file mode 100644 index 0000000000000..ede41c10f3ac2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.util.List; +import java.util.function.Function; + +public class Signum extends UnaryScalarFunction { + @FunctionInfo( + returnType = { "double" }, + description = "Returns the sign of the given number.\n" + + "It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + examples = @Example(file = "math", tag = "signum") + ) + public Signum( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." 
+ ) Expression n + ) { + super(source, n); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + var field = toEvaluator.apply(field()); + var fieldType = field().dataType(); + + if (fieldType == DataTypes.DOUBLE) { + return new SignumDoubleEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.INTEGER) { + return new SignumIntEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.LONG) { + return new SignumLongEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.UNSIGNED_LONG) { + return new SignumUnsignedLongEvaluator.Factory(source(), field); + } + + throw EsqlIllegalArgumentException.illegalDataType(fieldType); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Signum(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Signum::new, field()); + } + + @Override + public DataType dataType() { + return DataTypes.DOUBLE; + } + + @Evaluator(extraName = "Double") + static double process(double val) { + return Math.signum(val); + } + + @Evaluator(extraName = "Int") + static double process(int val) { + return Math.signum(val); + } + + @Evaluator(extraName = "Long") + static double process(long val) { + return Math.signum(val); + } + + @Evaluator(extraName = "UnsignedLong") + static double processUnsignedLong(long val) { + return Math.signum(NumericUtils.unsignedLongToDouble(val)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index f30425158b1b3..9469889285fd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -127,12 +127,21 @@ *
 *     <li>
 *         Generate a syntax diagram and a table with supported types by running the tests via
 *         gradle: {@code ./gradlew x-pack:plugin:esql:test}
- *         The generated files can be found here
- *         {@code docs/reference/esql/functions/signature/myfunction.svg }
- *         and here
- *         {@code docs/reference/esql/functions/types/myfunction.asciidoc}
- *         Make sure to commit them and reference them in your doc file. There are plenty of examples on how
- *         to reference those files e.g. {@code docs/reference/esql/functions/sin.asciidoc}.
+ *         The generated files are
+ *         <ol>
+ *             <li>{@code docs/reference/esql/functions/description/myfunction.asciidoc}</li>
+ *             <li>{@code docs/reference/esql/functions/examples/myfunction.asciidoc}</li>
+ *             <li>{@code docs/reference/esql/functions/layout/myfunction.asciidoc}</li>
+ *             <li>{@code docs/reference/esql/functions/parameters/myfunction.asciidoc}</li>
+ *             <li>{@code docs/reference/esql/functions/signature/myfunction.svg}</li>
+ *             <li>{@code docs/reference/esql/functions/types/myfunction.asciidoc}</li>
+ *         </ol>
+ *         Make sure to commit them. Add a reference to the
+ *         {@code docs/reference/esql/functions/layout/myfunction.asciidoc} in the function list
+ *         docs. There are plenty of examples on how
+ *         to reference those files e.g. if you are writing a Math function, you will want to
+ *         list it in {@code docs/reference/esql/functions/math-functions.asciidoc}.
 *     </li>
 *     <li>
 *         Build the docs by cloning the docs repo
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 21c17110ad4fe..a85ddac532241 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -80,6 +80,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; @@ -349,6 +350,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Log10.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, LTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, RTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, Signum.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sin.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sinh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sqrt.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -1296,6 +1298,7 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(LTrim.class), LTrim::new), entry(name(RTrim.class), RTrim::new), entry(name(Neg.class), Neg::new), + entry(name(Signum.class), Signum::new), entry(name(Sin.class), Sin::new), entry(name(Sinh.class), Sinh::new), entry(name(Sqrt.class), Sqrt::new), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java new file mode 100644 index 0000000000000..4167029010950 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public class SignumTests extends AbstractFunctionTestCase { + public SignumTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + String read = "Attribute[channel=0]"; + List suppliers = new ArrayList<>(); + TestCaseSupplier.forUnaryInt( + suppliers, + "SignumIntEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + i -> (double) Math.signum(i), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + TestCaseSupplier.forUnaryLong( + suppliers, + "SignumLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + l -> (double) Math.signum(l), + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "SignumUnsignedLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + ul -> Math.signum(NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "SignumDoubleEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + Math::signum, + -Double.MAX_VALUE, + Double.MAX_VALUE, + List.of() + ); + + suppliers = anyNullIsNull(true, suppliers); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new Signum(source, args.get(0)); + } +} From a32512fe0f588e2a03dd3892aef0bed5828fbc9d Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 10:51:15 +0200 Subject: [PATCH 109/264] Connector API: Followup on #106060 (#107058) --- .../connector/ConnectorConfiguration.java | 66 ++++++++++--------- .../ConfigurationValidation.java | 6 +- 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java index fc2c0920f49df..75dba46f8e29c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -45,10 +46,14 @@ public class ConnectorConfiguration implements Writeable, ToXContentObject { @Nullable private final String category; + @Nullable private final Object defaultValue; + @Nullable private final List dependsOn; + @Nullable private 
final ConfigurationDisplayType display; private final String label; + @Nullable private final List<ConfigurationSelectOption> options; @Nullable private final Integer order; @@ -58,9 +63,13 @@ public class ConnectorConfiguration implements Writeable, ToXContentObject { private final boolean sensitive; @Nullable private final String tooltip; + @Nullable private final ConfigurationFieldType type; + @Nullable private final List<String> uiRestrictions; + @Nullable private final List<ConfigurationValidation> validations; + @Nullable private final Object value; /** @@ -380,41 +389,38 @@ public void writeTo(StreamOutput out) throws IOException { public Map<String, Object> toMap() { Map<String, Object> map = new HashMap<>(); - if (category != null) { - map.put(CATEGORY_FIELD.getPreferredName(), category); - } + + Optional.ofNullable(category).ifPresent(c -> map.put(CATEGORY_FIELD.getPreferredName(), c)); map.put(DEFAULT_VALUE_FIELD.getPreferredName(), defaultValue); - if (dependsOn != null) { - map.put(DEPENDS_ON_FIELD.getPreferredName(), dependsOn.stream().map(ConfigurationDependency::toMap).toList()); - } - if (display != null) { - map.put(DISPLAY_FIELD.getPreferredName(), display.toString()); - } + + Optional.ofNullable(dependsOn) + .ifPresent(d -> map.put(DEPENDS_ON_FIELD.getPreferredName(), d.stream().map(ConfigurationDependency::toMap).toList())); + + Optional.ofNullable(display).ifPresent(d -> map.put(DISPLAY_FIELD.getPreferredName(), d.toString())); + map.put(LABEL_FIELD.getPreferredName(), label); - if (options != null) { - map.put(OPTIONS_FIELD.getPreferredName(), options.stream().map(ConfigurationSelectOption::toMap).toList()); - } - if (order != null) { - map.put(ORDER_FIELD.getPreferredName(), order); - } - if (placeholder != null) { - map.put(PLACEHOLDER_FIELD.getPreferredName(), placeholder); - } + + Optional.ofNullable(options) + .ifPresent(o -> map.put(OPTIONS_FIELD.getPreferredName(), o.stream().map(ConfigurationSelectOption::toMap).toList())); + + Optional.ofNullable(order).ifPresent(o -> map.put(ORDER_FIELD.getPreferredName(), o)); + + Optional.ofNullable(placeholder).ifPresent(p -> map.put(PLACEHOLDER_FIELD.getPreferredName(), p)); + map.put(REQUIRED_FIELD.getPreferredName(), required); map.put(SENSITIVE_FIELD.getPreferredName(), sensitive); - if (tooltip != null) { - map.put(TOOLTIP_FIELD.getPreferredName(), tooltip); - } - if (type != null) { - map.put(TYPE_FIELD.getPreferredName(), type.toString()); - } - if (uiRestrictions != null) { - map.put(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); - } - if (validations != null) { - map.put(VALIDATIONS_FIELD.getPreferredName(), validations.stream().map(ConfigurationValidation::toMap).toList()); - } + + Optional.ofNullable(tooltip).ifPresent(t -> map.put(TOOLTIP_FIELD.getPreferredName(), t)); + + Optional.ofNullable(type).ifPresent(t -> map.put(TYPE_FIELD.getPreferredName(), t.toString())); + + Optional.ofNullable(uiRestrictions).ifPresent(u -> map.put(UI_RESTRICTIONS_FIELD.getPreferredName(), u)); + + Optional.ofNullable(validations) + .ifPresent(v -> map.put(VALIDATIONS_FIELD.getPreferredName(), v.stream().map(ConfigurationValidation::toMap).toList())); + map.put(VALUE_FIELD.getPreferredName(), value); + return map; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java index 51e912650bc1d..4ed5a22d2a4bf 100644 ---
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -103,10 +102,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public Map<String, Object> toMap() { - Map<String, Object> map = new HashMap<>(); - map.put(CONSTRAINT_FIELD.getPreferredName(), constraint); - map.put(TYPE_FIELD.getPreferredName(), type.toString()); - return map; + return Map.of(CONSTRAINT_FIELD.getPreferredName(), constraint, TYPE_FIELD.getPreferredName(), type.toString()); } public static ConfigurationValidation fromXContent(XContentParser parser) throws IOException { From d6582cf1afcd460767bc4f60f8270fcfef5066c4 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 4 Apr 2024 12:42:23 +0200 Subject: [PATCH 110/264] Address concurrency issue in top hits aggregation (#106990) Top hits aggregation runs the fetch phase concurrently when the query phase is executed across multiple slices. This is problematic as the fetch phase does not support concurrent execution yet. The core of the issue is that the search execution context is shared across slices, and each slice calls setLookupProviders against it concurrently, each time setting different instances of preloaded source and field lookup providers. This crosses streams between slices and trips Lucene assertions which ensure that stored fields loaded from a certain thread are not read from a different thread. We have not hit this before because the problem revolves around SearchLookup, which is used by runtime fields. TopHitsIT is the main test we have for the top hits aggregation, but it uses a mock script engine that bypasses Painless and SearchLookup.
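In outline, the fix below stops sharing the mutable context across slices: before running the fetch phase for a bucket, the top hits aggregator forks the shared SearchExecutionContext and serves the fork through a wrapper SubSearchContext. A minimal sketch of the pattern, simplified from the TopHitsAggregator change in this patch (profiling and result handling omitted):

    // Give each slice its own copy of the shared context, so concurrent
    // setLookupProviders calls no longer cross streams between slices.
    SearchExecutionContext forked = new SearchExecutionContext(subSearchContext.getSearchExecutionContext());
    SubSearchContext fetchContext = new SubSearchContext(subSearchContext) {
        @Override
        public SearchExecutionContext getSearchExecutionContext() {
            return forked; // serve the per-slice copy, not the shared instance
        }
    };
    fetchContext.fetchPhase().execute(fetchContext, docIdsToLoad);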
--- docs/changelog/106990.yaml | 5 ++ .../bucket/terms/RareTermsIT.java | 39 ++++++++++++ .../bucket/terms/StringTermsIT.java | 51 +++++++++++++++ .../aggregations/metrics/TopHitsIT.java | 62 +++++++++++++++---- .../metrics/TopHitsAggregator.java | 17 ++++- .../search/fetch/FetchPhase.java | 5 ++ .../search/internal/SubSearchContext.java | 21 ++++++- .../terms/RareTermsAggregatorTests.java | 45 -------------- .../bucket/terms/TermsAggregatorTests.java | 54 ---------------- 9 files changed, 184 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/106990.yaml diff --git a/docs/changelog/106990.yaml b/docs/changelog/106990.yaml new file mode 100644 index 0000000000000..26646e742a5ee --- /dev/null +++ b/docs/changelog/106990.yaml @@ -0,0 +1,5 @@ +pr: 106990 +summary: Address concurrency issue in top hits aggregation +area: Aggregations +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java index 2dccda385bf53..c45cabf425b14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java @@ -12,12 +12,22 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matchers; +import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; /** * Test that index enough data to trigger the creation of Cuckoo filters. 
@@ -64,4 +74,33 @@ private void assertNumRareTerms(int maxDocs, int rareTerms) { } ); } + + public void testGlobalAggregationWithScore() { + createIndex("global", Settings.EMPTY, "_doc", "keyword", "type=keyword"); + prepareIndex("global").setSource("keyword", "a").setRefreshPolicy(IMMEDIATE).get(); + prepareIndex("global").setSource("keyword", "c").setRefreshPolicy(IMMEDIATE).get(); + prepareIndex("global").setSource("keyword", "e").setRefreshPolicy(IMMEDIATE).get(); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new RareTermsAggregationBuilder("terms").field("keyword") + .subAggregation( + new RareTermsAggregationBuilder("sub_terms").field("keyword") + .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); + assertNoFailuresAndResponse(client().prepareSearch("global").addAggregation(globalBuilder), response -> { + InternalGlobal result = response.getAggregations().get("global"); + InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); + assertThat(terms.getBuckets().size(), equalTo(3)); + for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { + InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); + assertThat(subTerms.getBuckets().size(), equalTo(1)); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); + assertThat(topHits.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : topHits.getHits()) { + assertThat(hit.getScore(), greaterThan(0f)); + } + } + }); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 1b2d66fc12c76..662744ddfe77e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -18,16 +18,24 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import 
org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -63,6 +71,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsNull.notNullValue; @@ -1376,4 +1385,46 @@ private void assertOrderByKeyResponse( } ); } + + public void testGlobalAggregationWithScore() throws Exception { + assertAcked(prepareCreate("global").setMapping("keyword", "type=keyword")); + indexRandom( + true, + prepareIndex("global").setSource("keyword", "a"), + prepareIndex("global").setSource("keyword", "c"), + prepareIndex("global").setSource("keyword", "e") + ); + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation( + new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); + assertNoFailuresAndResponse(prepareSearch("global").addAggregation(globalBuilder), response -> { + InternalGlobal result = response.getAggregations().get("global"); + InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); + assertThat(terms.getBuckets().size(), equalTo(3)); + for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { + InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); + assertThat(subTerms.getBuckets().size(), equalTo(1)); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); + assertThat(topHits.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : topHits.getHits()) { + assertThat(hit.getScore(), greaterThan(0f)); + } + } + }); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 6cf274cb69fb3..991fe98612e3d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.ArrayUtil; @@ -20,6 +21,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.MockScriptEngine; 
import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; @@ -34,8 +36,13 @@ import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; +import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.elasticsearch.search.lookup.FieldLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.search.sort.SortBuilders; @@ -43,6 +50,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -87,7 +95,7 @@ public class TopHitsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(CustomScriptPlugin.class); + return List.of(CustomScriptPlugin.class, FetchPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { @@ -110,7 +118,7 @@ public static String randomExecutionHint() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword")); + assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword", "text", "type=text,store=true")); assertAcked(prepareCreate("field-collapsing").setMapping("group", "type=keyword")); createIndex("empty"); assertAcked( @@ -592,7 +600,7 @@ public void testFieldCollapsing() throws Exception { ); } - public void testFetchFeatures() { + public void testFetchFeatures() throws IOException { final boolean seqNoAndTerm = randomBoolean(); assertNoFailuresAndResponse( prepareSearch("idx").setQuery(matchQuery("text", "text").queryName("test")) @@ -642,19 +650,14 @@ public void testFetchFeatures() { assertThat(hit.getMatchedQueries()[0], equalTo("test")); - DocumentField field1 = hit.field("field1"); - assertThat(field1.getValue(), equalTo(5L)); - - DocumentField field2 = hit.field("field2"); - assertThat(field2.getValue(), equalTo(2.71f)); - - assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); - - field2 = hit.field("script"); - assertThat(field2.getValue().toString(), equalTo("5")); + assertThat(hit.field("field1").getValue(), equalTo(5L)); + assertThat(hit.field("field2").getValue(), equalTo(2.71f)); + assertThat(hit.field("script").getValue().toString(), equalTo("5")); assertThat(hit.getSourceAsMap().size(), equalTo(1)); assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); + assertEquals("some text to entertain", hit.getFields().get("text").getValue()); + assertEquals("some text to entertain", hit.getFields().get("text_stored_lookup").getValue()); } } ); @@ -1263,4 +1266,37 @@ public void testWithRescore() { } ); } + + public static class FetchPlugin extends Plugin implements SearchPlugin { + @Override + public List getFetchSubPhases(FetchPhaseConstructionContext context) { + return Collections.singletonList(fetchContext -> { + if 
(fetchContext.getIndexName().equals("idx")) { + return new FetchSubPhaseProcessor() { + + private LeafSearchLookup leafSearchLookup; + + @Override + public void setNextReader(LeafReaderContext ctx) { + leafSearchLookup = fetchContext.getSearchExecutionContext().lookup().getLeafSearchLookup(ctx); + } + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + leafSearchLookup.setDocument(hitContext.docId()); + FieldLookup fieldLookup = leafSearchLookup.fields().get("text"); + hitContext.hit() + .setDocumentField("text_stored_lookup", new DocumentField("text_stored_lookup", fieldLookup.getValues())); + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + }; + } + return null; + }); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 75f5c472c6665..92fb09b017b2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.common.util.LongObjectPagedHashMap.Cursor; import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -191,8 +192,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[i] = topDocs.scoreDocs[i].doc; } - subSearchContext.fetchPhase().execute(subSearchContext, docIdsToLoad); - FetchSearchResult fetchResult = subSearchContext.fetchResult(); + FetchSearchResult fetchResult = runFetchPhase(subSearchContext, docIdsToLoad); if (fetchProfiles != null) { fetchProfiles.add(fetchResult.profileResult()); } @@ -216,6 +216,19 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE ); } + private static FetchSearchResult runFetchPhase(SubSearchContext subSearchContext, int[] docIdsToLoad) { + // Fork the search execution context for each slice, because the fetch phase does not support concurrent execution yet. 
+ SearchExecutionContext searchExecutionContext = new SearchExecutionContext(subSearchContext.getSearchExecutionContext()); + SubSearchContext fetchSubSearchContext = new SubSearchContext(subSearchContext) { + @Override + public SearchExecutionContext getSearchExecutionContext() { + return searchExecutionContext; + } + }; + fetchSubSearchContext.fetchPhase().execute(fetchSubSearchContext, docIdsToLoad); + return fetchSubSearchContext.fetchResult(); + } + @Override public InternalTopHits buildEmptyAggregation() { TopDocs topDocs; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index c106d9b6f4cb2..2fa3e903a0074 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -104,6 +104,11 @@ private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Pr PreloadedSourceProvider sourceProvider = new PreloadedSourceProvider(); PreloadedFieldLookupProvider fieldLookupProvider = new PreloadedFieldLookupProvider(); + // The following relies on the fact that we fetch sequentially one segment after another, from a single thread + // This needs to be revised once we add concurrency to the fetch phase, and needs a work-around for situations + // where we run fetch as part of the query phase, where inter-segment concurrency is leveraged. + // One problem is the global setLookupProviders call against the shared execution context. + // Another problem is that the above provider implementations are not thread-safe context.getSearchExecutionContext().setLookupProviders(sourceProvider, ctx -> fieldLookupProvider); List processors = getProcessors(context.shardTarget(), fetchContext, profiler); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index 8567677aca30a..f31b319882b5a 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -29,7 +29,7 @@ public class SubSearchContext extends FilteredSearchContext { // By default return 3 hits per bucket. A higher default would make the response really large by default, since - // the to hits are returned per bucket. + // the top hits are returned per bucket. 
private static final int DEFAULT_SIZE = 3; private int from; @@ -62,6 +62,25 @@ public SubSearchContext(SearchContext context) { this.querySearchResult = new QuerySearchResult(); } + public SubSearchContext(SubSearchContext subSearchContext) { + this((SearchContext) subSearchContext); + this.from = subSearchContext.from; + this.size = subSearchContext.size; + this.sort = subSearchContext.sort; + this.parsedQuery = subSearchContext.parsedQuery; + this.query = subSearchContext.query; + this.storedFields = subSearchContext.storedFields; + this.scriptFields = subSearchContext.scriptFields; + this.fetchSourceContext = subSearchContext.fetchSourceContext; + this.docValuesContext = subSearchContext.docValuesContext; + this.fetchFieldsContext = subSearchContext.fetchFieldsContext; + this.highlight = subSearchContext.highlight; + this.explain = subSearchContext.explain; + this.trackScores = subSearchContext.trackScores; + this.version = subSearchContext.version; + this.seqNoAndPrimaryTerm = subSearchContext.seqNoAndPrimaryTerm; + } + @Override public void preProcess() {} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 2d240f74b91a4..dff5c090f818e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -49,8 +48,6 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests; @@ -72,7 +69,6 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; public class RareTermsAggregatorTests extends AggregatorTestCase { @@ -334,47 +330,6 @@ public void testInsideTerms() throws IOException { } } - public void testGlobalAggregationWithScore() throws IOException { - try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - Document document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("a"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("c"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", 
new BytesRef("e"))); - indexWriter.addDocument(document); - try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( - new RareTermsAggregationBuilder("terms").field("keyword") - .subAggregation( - new RareTermsAggregationBuilder("sub_terms").field("keyword") - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) - ) - ); - - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - - InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); - InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); - assertThat(terms.getBuckets().size(), equalTo(3)); - for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { - InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); - assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); - InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : topHits.getHits()) { - assertThat(hit.getScore(), greaterThan(0f)); - } - } - } - } - } - } - public void testWithNestedAggregations() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 183d1d0ab6ed0..788249fee1187 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -76,7 +76,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.StringFieldScript; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregationExecutionException; @@ -91,8 +90,6 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -1308,57 +1305,6 @@ public void testMixLongAndDouble() throws Exception { } } - public void testGlobalAggregationWithScore() throws IOException { - try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - Document document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("a"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new 
SortedDocValuesField("keyword", new BytesRef("c"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("e"))); - indexWriter.addDocument(document); - try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); - Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( - new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword") - .order(BucketOrder.key(true)) - .subAggregation( - new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword") - .order(BucketOrder.key(true)) - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) - ) - ); - - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - - InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); - InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); - assertThat(terms.getBuckets().size(), equalTo(3)); - for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { - InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); - assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); - InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : topHits.getHits()) { - assertThat(hit.getScore(), greaterThan(0f)); - } - } - } - } - } - } - public void testWithNestedAggregations() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { From e33fd1b5a4d72c332d92e0513f75487672f9c280 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 14:06:12 +0200 Subject: [PATCH 111/264] [Connector API] Support numeric for configuration select option value type (#107059) --- docs/changelog/107059.yaml | 5 ++ .../335_connector_update_configuration.yml | 40 ++++++++++++++++ .../ConfigurationSelectOption.java | 25 ++++++---- .../ConnectorConfigurationTests.java | 48 +++++++++++++++++++ 4 files changed, 110 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/107059.yaml diff --git a/docs/changelog/107059.yaml b/docs/changelog/107059.yaml new file mode 100644 index 0000000000000..6c7ee48f9b53b --- /dev/null +++ b/docs/changelog/107059.yaml @@ -0,0 +1,5 @@ +pr: 107059 +summary: "[Connector API] Support numeric for configuration select option value type" +area: Application +type: bug +issues: [] diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 0bde4bafaffd4..418a3cf6de94a 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -161,6 +161,46 @@ setup: - match: { configuration.some_field.tooltip: null } +--- +"Update Connector Configuration with numeric select options": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: + - label: ten + value: 10 + - label: five + value: 5 + order: 4 + required: true + sensitive: false + tooltip: null + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.options.0.value: 10 } + - match: { configuration.some_field.options.1.value: 5 } + --- "Update Connector Configuration - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java index 3c17f97ead51d..9728faaac3dd4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java @@ -11,9 +11,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -25,16 +27,16 @@ public class ConfigurationSelectOption implements Writeable, ToXContentObject { private final String label; - private final String value; + private final Object value; - private ConfigurationSelectOption(String label, String value) { + private ConfigurationSelectOption(String label, Object value) { this.label = label; this.value = value; } public ConfigurationSelectOption(StreamInput in) throws IOException { this.label = in.readString(); - this.value = in.readString(); + this.value = in.readGenericValue(); } private static final ParseField LABEL_FIELD = new ParseField("label"); @@ -43,12 +45,19 @@ public ConfigurationSelectOption(StreamInput in) throws IOException { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "connector_configuration_select_option", true, - args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue((String) args[1]).build() + args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue(args[1]).build() ); static { PARSER.declareString(constructorArg(), LABEL_FIELD); - PARSER.declareString(constructorArg(), VALUE_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } + throw new 
XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); } @Override @@ -76,7 +85,7 @@ public static ConfigurationSelectOption fromXContent(XContentParser parser) thro @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(label); - out.writeString(value); + out.writeGenericValue(value); } @Override @@ -95,14 +104,14 @@ public int hashCode() { public static class Builder { private String label; - private String value; + private Object value; public Builder setLabel(String label) { this.label = label; return this; } - public Builder setValue(String value) { + public Builder setValue(Object value) { this.value = value; return this; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java index 3a7ff819ecbf5..caedb526b0b7e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -89,6 +89,54 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContent_WithNumericSelectOptions() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [ + { + "label": "five", + "value": 5 + }, + { + "label": "ten", + "value": 10 + } + ], + "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 0, + "type": "greater_than" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + public void testToXContentCrawlerConfig_WithNullValue() throws IOException { String content = XContentHelper.stripWhitespace(""" { From 90351ef63903c0ea5453d27b14575dbb2d07e6aa Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 14:04:14 +0100 Subject: [PATCH 112/264] Capture hot threads during slow cluster state application (#107087) Today we emit a warning when applying the cluster state takes more than 30s by default. Experience shows that it's almost always one task that exceeds the limit, and usually that task is the `IndicesClusterStateService` (see #89821). This commit adds a `DEBUG` logger that will capture a thread dump if a task is running for longer than a configurable duration so we can get more insight into the cause of the slow execution. 
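In outline, the watchdog below is armed when a recorded action starts and disarmed when it stops; if the action is still running when the timeout fires, the listener completes exceptionally and we capture a hot threads dump. A minimal sketch, simplified from the Recorder change in this patch (the real code only arms the watchdog when DEBUG logging is enabled):

    // On start: arm a listener that times out after the configured duration.
    SubscribableListener<Void> watchdog = new SubscribableListener<>();
    watchdog.addTimeout(debugLoggingTimeout, threadPool, threadPool.generic());
    watchdog.addListener(ActionListener.wrap(ignored -> {}, // completed in time, nothing to log
        e -> HotThreads.logLocalHotThreads( // timed out: dump hot threads at DEBUG
            logger, Level.DEBUG, "hot threads while applying cluster state [" + currentAction + ']', ReferenceDocs.LOGGING)));
    // On stop: complete the listener, which cancels the pending timeout.
    watchdog.onResponse(null);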
--- .../ClusterApplierRecordingService.java | 55 ++++++++-- .../service/ClusterApplierService.java | 21 ++-- .../common/settings/ClusterSettings.java | 1 + .../ClusterApplierRecordingServiceTests.java | 101 ++++++++++++++---- .../ml/integration/AnomalyJobCRUDIT.java | 3 +- .../AutodetectResultProcessorIT.java | 3 +- .../ml/integration/EstablishedMemUsageIT.java | 3 +- .../integration/JobModelSnapshotCRUDIT.java | 3 +- .../ml/integration/JobResultsProviderIT.java | 3 +- .../integration/JobStorageDeletionTaskIT.java | 3 +- ...sportGetTrainedModelsStatsActionTests.java | 3 +- .../ml/datafeed/DatafeedJobBuilderTests.java | 3 +- .../InferenceProcessorFactoryTests.java | 3 +- .../persistence/JobResultsPersisterTests.java | 3 +- .../OpenJobPersistentTasksExecutorTests.java | 1 + .../ResultsPersisterServiceTests.java | 3 +- .../slm/SnapshotLifecycleServiceTests.java | 74 +++++++------ 17 files changed, 201 insertions(+), 85 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java index b1a2726e468e9..e7bd3b938504e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java @@ -7,7 +7,14 @@ */ package org.elasticsearch.cluster.service; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Stats.Recording; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -16,6 +23,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.monitor.jvm.HotThreads; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,10 +37,11 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.LongSupplier; public final class ClusterApplierRecordingService { + private static final Logger logger = LogManager.getLogger(ClusterApplierRecordingService.class); + private final Map<String, MeanMetric> recordedActions = new HashMap<>(); synchronized Stats getStats() { @@ -59,13 +69,16 @@ synchronized void updateStats(Recorder recorder) { static final class Recorder { private String currentAction; - private long startTimeMS; + private long startMillis; private boolean recording; + private SubscribableListener<Void> currentListener; private final List<Tuple<String, Long>> recordings = new LinkedList<>(); - private final LongSupplier currentTimeSupplier; + private final ThreadPool threadPool; + private final TimeValue debugLoggingTimeout; - Recorder(LongSupplier currentTimeSupplier) { - this.currentTimeSupplier = currentTimeSupplier; + Recorder(ThreadPool threadPool, TimeValue debugLoggingTimeout) { + this.threadPool = threadPool; + this.debugLoggingTimeout = debugLoggingTimeout; } Releasable record(String action) { @@ -75,14 +88,40 @@ Releasable record(String action) { this.recording =
true; this.currentAction = action; - this.startTimeMS = currentTimeSupplier.getAsLong(); + this.startMillis = threadPool.rawRelativeTimeInMillis(); + + if (logger.isDebugEnabled()) { + currentListener = new SubscribableListener<>(); + currentListener.addTimeout(debugLoggingTimeout, threadPool, threadPool.generic()); + currentListener.addListener(new ActionListener<>() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + assert e instanceof ElasticsearchTimeoutException : e; // didn't complete in time + HotThreads.logLocalHotThreads( + logger, + Level.DEBUG, + "hot threads while applying cluster state [" + currentAction + ']', + ReferenceDocs.LOGGING + ); + } + }); + } + return this::stop; } void stop() { recording = false; - long timeSpentMS = currentTimeSupplier.getAsLong() - this.startTimeMS; - recordings.add(new Tuple<>(currentAction, timeSpentMS)); + long elapsedMillis = threadPool.rawRelativeTimeInMillis() - this.startMillis; + recordings.add(new Tuple<>(currentAction, elapsedMillis)); + + if (currentListener != null) { + currentListener.onResponse(null); + currentListener = null; + } } List> getRecordings() { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 4230838a97592..c2b35adb738f6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -64,12 +64,20 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements Setting.Property.NodeScope ); + public static final Setting CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING = Setting.positiveTimeSetting( + "cluster.service.slow_task_thread_dump_timeout", + TimeValue.timeValueSeconds(30), + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final String CLUSTER_UPDATE_THREAD_NAME = "clusterApplierService#updateTask"; private final ClusterSettings clusterSettings; private final ThreadPool threadPool; private volatile TimeValue slowTaskLoggingThreshold; + private volatile TimeValue slowTaskThreadDumpTimeout; private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor; @@ -98,15 +106,8 @@ public ClusterApplierService(String nodeName, Settings settings, ClusterSettings this.nodeName = nodeName; this.recordingService = new ClusterApplierRecordingService(); - this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings); - this.clusterSettings.addSettingsUpdateConsumer( - CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, - this::setSlowTaskLoggingThreshold - ); - } - - private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { - this.slowTaskLoggingThreshold = slowTaskLoggingThreshold; + clusterSettings.initializeAndWatch(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, t -> slowTaskLoggingThreshold = t); + clusterSettings.initializeAndWatch(CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, t -> slowTaskThreadDumpTimeout = t); } public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) { @@ -391,7 +392,7 @@ private void runTask(String source, Function updateF final ClusterState previousClusterState = state.get(); final long startTimeMillis = threadPool.relativeTimeInMillis(); - final Recorder stopWatch = new Recorder(threadPool::rawRelativeTimeInMillis); + 
final Recorder stopWatch = new Recorder(threadPool, slowTaskThreadDumpTimeout); final ClusterState newClusterState; try { try (Releasable ignored = stopWatch.record("running task [" + source + ']')) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index ac5255f58622a..3493206e00bf6 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -340,6 +340,7 @@ public void apply(Settings value, Settings current, Settings previous) { IndexModule.NODE_STORE_ALLOW_MMAP, IndexSettings.NODE_DEFAULT_REFRESH_INTERVAL_SETTING, ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, ClusterService.USER_DEFINED_METADATA, MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, MasterService.MASTER_SERVICE_STARVATION_LOGGING_THRESHOLD_SETTING, diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java index e4bad62b6834a..be7ca6d2f0616 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java @@ -8,11 +8,18 @@ package org.elasticsearch.cluster.service; +import org.apache.logging.log4j.Level; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Recorder; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Stats.Recording; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; import java.util.Map; @@ -20,22 +27,37 @@ public class ClusterApplierRecordingServiceTests extends ESTestCase { + private DeterministicTaskQueue deterministicTaskQueue; + private ThreadPool threadPool; + + @Before + public void createThreadPool() { + deterministicTaskQueue = new DeterministicTaskQueue(); + deterministicTaskQueue.scheduleAt(between(0, 1000000), () -> {}); + deterministicTaskQueue.runAllTasks(); + threadPool = deterministicTaskQueue.getThreadPool(); + } + + private void advanceTime(long millis) { + deterministicTaskQueue.scheduleAt(deterministicTaskQueue.getCurrentTimeMillis() + millis, () -> {}); + deterministicTaskQueue.runAllTasks(); + } + public void testRecorder() { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); { Releasable releasable = recorder.record("action1"); - currentTime[0] = 5; + advanceTime(5); releasable.close(); } { Releasable releasable = recorder.record("action2"); - currentTime[0] = 42; + advanceTime(37); releasable.close(); } { Releasable releasable = recorder.record("action3"); - currentTime[0] = 45; + advanceTime(3); releasable.close(); } @@ -44,8 +66,8 @@ public void testRecorder() { } public void testRecorderAlreadyRecording() { - var recorder = new Recorder(() -> 1L); 
- Releasable releasable = recorder.record("action1"); + var recorder = new Recorder(threadPool, TimeValue.ZERO); + Releasable ignored = recorder.record("action1"); expectThrows(IllegalStateException.class, () -> recorder.record("action2")); } @@ -53,16 +75,15 @@ public void testRecordingServiceStats() { var service = new ClusterApplierRecordingService(); { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 5; + advanceTime(5); } try (var r = recorder.record("action2")) { - currentTime[0] = 42; + advanceTime(37); } try (var r = recorder.record("action3")) { - currentTime[0] = 45; + advanceTime(3); } service.updateStats(recorder); var stats = service.getStats(); @@ -76,16 +97,15 @@ public void testRecordingServiceStats() { ); } { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 3; + advanceTime(3); } try (var r = recorder.record("action2")) { - currentTime[0] = 35; + advanceTime(32); } try (var r = recorder.record("action3")) { - currentTime[0] = 41; + advanceTime(6); } service.updateStats(recorder); var stats = service.getStats(); @@ -99,13 +119,12 @@ public void testRecordingServiceStats() { ); } { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 2; + advanceTime(2); } try (var r = recorder.record("action3")) { - currentTime[0] = 6; + advanceTime(4); } service.updateStats(recorder); var stats = service.getStats(); @@ -116,4 +135,44 @@ public void testRecordingServiceStats() { } } + @TestLogging(reason = "testing debug logging", value = "org.elasticsearch.cluster.service.ClusterApplierRecordingService:DEBUG") + public void testSlowTaskDebugLogging() { + final var debugLoggingTimeout = TimeValue.timeValueMillis(between(1, 100000)); + var recorder = new Recorder(threadPool, debugLoggingTimeout); + + // ensure hot threads is logged if the action is too slow + var slowAction = recorder.record("slow_action"); + deterministicTaskQueue.scheduleAt( + deterministicTaskQueue.getCurrentTimeMillis() + debugLoggingTimeout.millis() + between(1, 1000), + slowAction::close + ); + MockLogAppender.assertThatLogger( + deterministicTaskQueue::runAllTasksInTimeOrder, + ClusterApplierRecordingService.class, + new MockLogAppender.SeenEventExpectation( + "hot threads", + ClusterApplierRecordingService.class.getCanonicalName(), + Level.DEBUG, + "hot threads while applying cluster state [slow_action]" + ) + ); + + // ensure hot threads is _NOT_ logged if the action completes quickly enough + var fastAction = recorder.record("fast_action"); + deterministicTaskQueue.scheduleAt( + randomLongBetween(0, deterministicTaskQueue.getCurrentTimeMillis() + debugLoggingTimeout.millis() - 1), + fastAction::close + ); + MockLogAppender.assertThatLogger( + deterministicTaskQueue::runAllTasksInTimeOrder, + ClusterApplierRecordingService.class, + new MockLogAppender.UnseenEventExpectation( + "hot threads", + ClusterApplierRecordingService.class.getCanonicalName(), + Level.DEBUG, + "*" + ) + ); + } + } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java 
b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java index 35d99b97f99c9..08fda90f9fd73 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java @@ -68,7 +68,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index c24c1c1becb18..6cb467af525c9 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -165,7 +165,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java index e09df368ecbdc..96b4aea1a55b9 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java @@ -56,7 +56,8 @@ public void createComponents() { ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java index 1538d7a94fb81..dbc8ec3f99a97 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java @@ -68,7 +68,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - 
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index b4ffe46e6ea92..ae128b507c795 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -128,7 +128,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java index b7bd8fed3e83c..4493a680d25cf 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java @@ -72,7 +72,8 @@ public void createComponents() { ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index 5c2c3abf232f5..442c0095b3001 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -121,7 +121,8 @@ public void setUpVariables() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index be768e1a01e20..98bcb5d7f0d8e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -82,7 +82,8 @@ public void init() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index 0698c266400b0..9adbb3b3dd89a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -98,7 +98,8 @@ public void setUpVariables() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index 0a7fc75115d2a..7b0d9d3051dcc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -436,7 +436,8 @@ private ResultsPersisterService buildResultsPersisterService(OriginSettingClient OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index c3db184759d3f..0440a66bdbcaa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -100,6 +100,7 @@ public void setUpMocks() { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, MachineLearning.CONCURRENT_JOB_ALLOCATIONS, MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearning.MAX_LAZY_ML_NODES, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index 7aaeabac3af8b..2acf2e3da3cf6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -408,7 +408,8 @@ public static ResultsPersisterService buildResultsPersisterService(OriginSetting OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java index 2013a8ff53301..b65f3587ffbf0 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java @@ -443,43 +443,47 @@ public void testValidateMinimumInterval() { public void testStoppedPriority() { ClockMock clock = new ClockMock(); ThreadPool threadPool = new TestThreadPool("name"); - ClusterSettings clusterSettings = new ClusterSettings( - Settings.EMPTY, - new HashSet<>( - Arrays.asList( - MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, - OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, - ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + try { + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + new HashSet<>( + Arrays.asList( + MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, + ClusterService.USER_DEFINED_METADATA, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING + ) ) - ) - ); - final SetOnce<OperationModeUpdateTask> task = new SetOnce<>(); - ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { - @Override - public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { - logger.info("--> got task: [source: {}]: {}", source, updateTask); - if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { - task.set(operationModeUpdateTask); + ); + final SetOnce<OperationModeUpdateTask> task = new SetOnce<>(); + ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { + @Override + public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { + logger.info("--> got task: [source: {}]: {}", source, updateTask); + if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { + task.set(operationModeUpdateTask); + } } - } - }; - - SnapshotLifecycleService service = new SnapshotLifecycleService( - Settings.EMPTY, - () -> new SnapshotLifecycleTask(null, null, null), - fakeService, - clock - ); - ClusterState state = createState( - new SnapshotLifecycleMetadata(Map.of(), OperationMode.STOPPING, new SnapshotLifecycleStats(0, 0, 0, 0, Map.of())), - true - ); -
service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); - assertEquals(task.get().priority(), Priority.IMMEDIATE); - assertNull(task.get().getILMOperationMode()); - assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); - threadPool.shutdownNow(); + }; + + SnapshotLifecycleService service = new SnapshotLifecycleService( + Settings.EMPTY, + () -> new SnapshotLifecycleTask(null, null, null), + fakeService, + clock + ); + ClusterState state = createState( + new SnapshotLifecycleMetadata(Map.of(), OperationMode.STOPPING, new SnapshotLifecycleStats(0, 0, 0, 0, Map.of())), + true + ); + service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); + assertEquals(task.get().priority(), Priority.IMMEDIATE); + assertNull(task.get().getILMOperationMode()); + assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } } class FakeSnapshotTask extends SnapshotLifecycleTask { From 43d0ef94cb5f5104c3b5a1cea363217b140bbb81 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 14:05:28 +0100 Subject: [PATCH 113/264] AwaitsFix for #107043 --- .../admin/cluster/node/tasks/TransportTasksActionTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 7168b2c1edcdd..9ddcf8a596226 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -563,6 +563,7 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { responseLatch.await(10, TimeUnit.SECONDS); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107043") public void testFailedTasksCount() throws Exception { Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build(); setupTestNodes(settings); From f30b79e9727ab88b0dbb9d2c013a0554ad73cc69 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 15:19:18 +0200 Subject: [PATCH 114/264] [Connector API] Support updating single schedule type (full, incremental or access_control) (#107090) --- .../331_connector_update_scheduling.yml | 35 +++++++++++++--- .../connector/ConnectorScheduling.java | 42 +++++++++++++++---- .../UpdateConnectorSchedulingAction.java | 9 ++++ .../connector/ConnectorIndexServiceTests.java | 38 +++++++++++++++++ .../connector/ConnectorTestUtils.java | 2 +- 5 files changed, 111 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml index e8e3fa0e87068..dd74fa7e27c2f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml @@ -44,6 +44,34 @@ setup: - match: { scheduling.incremental.enabled: false } - match: { scheduling.incremental.interval: "3 0 0 * * ?" 
} + +--- +"Update Connector Scheduling - Update single schedule only": + - do: + connector.update_scheduling: + connector_id: test-connector + body: + scheduling: + incremental: + enabled: true + interval: 3 0 0 * * ? + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { scheduling.incremental.enabled: true } + - match: { scheduling.incremental.interval: "3 0 0 * * ?" } + + # Other schedules are unchanged (those are defaults when connector is created) + - match: { scheduling.full.enabled: false } + - match: { scheduling.full.interval: "0 0 0 * * ?" } + - match: { scheduling.access_control.enabled: false } + - match: { scheduling.access_control.interval: "0 0 0 * * ?" } + --- "Update Connector Scheduling - Connector doesn't exist": - do: @@ -63,16 +91,13 @@ setup: interval: 3 0 0 * * ? --- -"Update Connector Scheduling - Required fields are missing": +"Update Connector Scheduling - Schedules are missing": - do: catch: "bad_request" connector.update_scheduling: connector_id: test-connector body: - scheduling: - incremental: - enabled: false - interval: 3 0 0 * * ? + scheduling: {} --- "Update Connector Scheduling - Wrong CRON expression": diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java index 98b6bdf1f3250..3c08a5ac1e218 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java @@ -27,6 +27,7 @@ import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class ConnectorScheduling implements Writeable, ToXContentObject { @@ -45,9 +46,9 @@ public class ConnectorScheduling implements Writeable, ToXContentObject { * @param incremental connector incremental sync schedule represented as {@link ScheduleConfig} */ private ConnectorScheduling(ScheduleConfig accessControl, ScheduleConfig full, ScheduleConfig incremental) { - this.accessControl = Objects.requireNonNull(accessControl, ACCESS_CONTROL_FIELD.getPreferredName()); - this.full = Objects.requireNonNull(full, FULL_FIELD.getPreferredName()); - this.incremental = Objects.requireNonNull(incremental, INCREMENTAL_FIELD.getPreferredName()); + this.accessControl = accessControl; + this.full = full; + this.incremental = incremental; } public ConnectorScheduling(StreamInput in) throws IOException { @@ -56,6 +57,18 @@ public ConnectorScheduling(StreamInput in) throws IOException { this.incremental = new ScheduleConfig(in); } + public ScheduleConfig getAccessControl() { + return accessControl; + } + + public ScheduleConfig getFull() { + return full; + } + + public ScheduleConfig getIncremental() { + return incremental; + } + private static final ConstructingObjectParser<ConnectorScheduling, Void> PARSER = new ConstructingObjectParser<>( "connector_scheduling", true, @@ -67,13 +80,18 @@ static { PARSER.declareField( - constructorArg(), + optionalConstructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), ACCESS_CONTROL_FIELD, ObjectParser.ValueType.OBJECT ); - PARSER.declareField(constructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), FULL_FIELD, ObjectParser.ValueType.OBJECT);
+ PARSER.declareField(optionalConstructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), FULL_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ScheduleConfig.fromXContent(p), + INCREMENTAL_FIELD, + ObjectParser.ValueType.OBJECT + ); } public static ConnectorScheduling fromXContentBytes(BytesReference source, XContentType xContentType) { @@ -92,9 +110,15 @@ public static ConnectorScheduling fromXContent(XContentParser parser) throws IOE public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(ACCESS_CONTROL_FIELD.getPreferredName(), accessControl); - builder.field(FULL_FIELD.getPreferredName(), full); - builder.field(INCREMENTAL_FIELD.getPreferredName(), incremental); + if (accessControl != null) { + builder.field(ACCESS_CONTROL_FIELD.getPreferredName(), accessControl); + } + if (full != null) { + builder.field(FULL_FIELD.getPreferredName(), full); + } + if (incremental != null) { + builder.field(INCREMENTAL_FIELD.getPreferredName(), incremental); + } } builder.endObject(); return builder; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java index b7d3c0c2e5d10..578639f065a0b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -73,6 +73,15 @@ public ActionRequestValidationException validate() { validationException = addValidationError("[scheduling] cannot be [null].", validationException); } + if (Objects.isNull(scheduling.getFull()) + && Objects.isNull(scheduling.getIncremental()) + && Objects.isNull(scheduling.getAccessControl())) { + validationException = addValidationError( + "[scheduling] object needs to define at least one schedule type: [full | incremental | access_control]", + validationException + ); + } + return validationException; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 601c1597a39b0..00cc08a3b0bb7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -54,6 +54,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; @@ -320,6 +321,43 @@ public void testUpdateConnectorScheduling() throws Exception { assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } + public void testUpdateConnectorScheduling_OnlyFullSchedule() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); +
String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + // Update scheduling for full, incremental and access_control + ConnectorScheduling initialScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); + UpdateConnectorSchedulingAction.Request updateSchedulingRequest = new UpdateConnectorSchedulingAction.Request( + connectorId, + initialScheduling + ); + DocWriteResponse updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + // Update full scheduling only + ConnectorScheduling.ScheduleConfig fullSyncSchedule = new ConnectorScheduling.ScheduleConfig.Builder().setEnabled(randomBoolean()) + .setInterval(getRandomCronExpression()) + .build(); + + UpdateConnectorSchedulingAction.Request updateSchedulingRequestWithFullSchedule = new UpdateConnectorSchedulingAction.Request( + connectorId, + new ConnectorScheduling.Builder().setFull(fullSyncSchedule).build() + ); + + updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequestWithFullSchedule); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + // Assert that full schedule is updated + assertThat(fullSyncSchedule, equalTo(indexedConnector.getScheduling().getFull())); + // Assert that other schedules stay unchanged + assertThat(initialScheduling.getAccessControl(), equalTo(indexedConnector.getScheduling().getAccessControl())); + assertThat(initialScheduling.getIncremental(), equalTo(indexedConnector.getScheduling().getIncremental())); + } + public void testUpdateConnectorIndexName() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 0eade25eaa03f..48168c2f45827 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -325,7 +325,7 @@ private static ConnectorFeatures.SyncRulesFeatures randomSyncRulesFeatures() { /** * Second (0 - 59) Minute (0 - 59) Hour (0 - 23) Day of month (1 - 31) Month (1 - 12) */ - private static Cron getRandomCronExpression() { + public static Cron getRandomCronExpression() { return new Cron( String.format( Locale.ROOT, From 29a3256a7dc6419a40ba5b156eaa295af41be869 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 4 Apr 2024 15:42:03 +0200 Subject: [PATCH 115/264] [DOCS] Makes Inference APIs main page more informative. 
(#107100) --- .../inference/delete-inference.asciidoc | 14 +++++++------ .../inference/get-inference.asciidoc | 20 ++++++++++--------- .../inference/inference-apis.asciidoc | 14 ++++++++----- .../inference/post-inference.asciidoc | 17 +++++++++------- .../inference/put-inference.asciidoc | 6 +++--- 5 files changed, 41 insertions(+), 30 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 5b693f51d65da..72f752563491b 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Deletes an {infer} model deployment. - -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +Deletes an {infer} endpoint. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 1a11904a169ca..2cfc17a3b6203 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Retrieves {infer} model information. +Retrieves {infer} endpoint information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] @@ -37,10 +39,10 @@ own model, use the <>. You can get information in a single API request for: -* a single {infer} model by providing the task type and the model ID, -* all of the {infer} models for a certain task type by providing the task type -and a wildcard expression, -* all of the {infer} models by using a wildcard expression. +* a single {infer} endpoint by providing the task type and the {infer} ID, +* all of the {infer} endpoints for a certain task type by providing the task +type and a wildcard expression, +* all of the {infer} endpoints by using a wildcard expression. [discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index cdc6bfe254ea2..d700a396e08bf 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -4,12 +4,16 @@ experimental[] -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster.
This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. -You can use the following APIs to manage {infer} models and perform {infer}: +The {infer} APIs enable you to create {infer} endpoints and use {ml} models of +different providers - such as Cohere, OpenAI, or HuggingFace - as a service. Use +the following APIs to manage {infer} models and perform {infer}: * <> * <> diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 08a58d7789e33..5b78af905b095 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Performs an inference task on an input text by using an {infer} model. +Performs an inference task on an input text by using an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] @@ -34,8 +36,9 @@ own model, use the <>. The perform {infer} API enables you to use {ml} models to perform specific tasks on data that you provide as an input. The API returns a response with the -results of the tasks. The {infer} model you use can perform one specific task -that has been defined when the model was created with the <>. +results of the tasks. The {infer} endpoint you use can perform one specific task +that has been defined when the endpoint was created with the +<>. [discrete] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 110ec9d6fa98c..87a865b9487e5 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -4,7 +4,7 @@ experimental[] -Creates a model to perform an {infer} task. +Creates an {infer} endpoint to perform an {infer} task. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or @@ -33,8 +33,8 @@ or if you want to use non-NLP models, use the <>. [[put-inference-api-desc]] ==== {api-description-title} -The create {infer} API enables you to create and configure a {ml} model to -perform a specific {infer} task. +The create {infer} API enables you to create an {infer} endpoint and configure a +{ml} model to perform a specific {infer} task.
The following services are available through the {infer} API: From 87a995377efafd56d28ec6acf1dcefd2ca1e8ec5 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 07:11:47 -0700 Subject: [PATCH 116/264] Unmute testUniDirectionalIndexFollowing (#106886) --- .../java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java index db30f7cb98b02..68ebb43d607a1 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java @@ -28,7 +28,6 @@ public class CcrRollingUpgradeIT extends AbstractMultiClusterUpgradeTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102000") public void testUniDirectionalIndexFollowing() throws Exception { logger.info("clusterName={}, upgradeState={}", clusterName, upgradeState); From da28e76cccca610b7ed5c77ceb75892f5dda659b Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Thu, 4 Apr 2024 17:20:14 +0300 Subject: [PATCH 117/264] Default `data_streams.auto_sharding.excludes` to NO exclusions (`[]`) (#107091) --- .../datastreams/DataStreamAutoshardingIT.java | 7 +------ .../autosharding/DataStreamAutoShardingService.java | 2 +- .../DataStreamAutoShardingServiceTests.java | 10 ++-------- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java index 9f2e6feb91659..f7743ebac9caf 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java @@ -84,7 +84,6 @@ protected Collection> nodePlugins() { public void configureClusterSettings() { updateClusterSettings( Settings.builder() - .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of()) // we want to manually trigger the rollovers in this test suite to be able to assert incrementally the changes in shard // configurations .put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "30d") @@ -93,11 +92,7 @@ public void configureClusterSettings() { @After public void resetClusterSetting() { - updateClusterSettings( - Settings.builder() - .putNull(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey()) - .putNull(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL) - ); + updateClusterSettings(Settings.builder().putNull(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL)); } public void testRolloverOnAutoShardCondition() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index a26be73cc169d..da1c85834d2c8 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ 
b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -46,7 +46,7 @@ public class DataStreamAutoShardingService { public static final Setting<List<String>> DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING = Setting.listSetting( "data_streams.auto_sharding.excludes", - List.of("*"), + List.of(), Function.identity(), Setting.Property.Dynamic, Setting.Property.NodeScope diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 41a5d0b70ea10..0d1104279d3ce 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -77,10 +77,7 @@ public void setupService() { clusterService = createClusterService(threadPool, clusterSettings); now = System.currentTimeMillis(); service = new DataStreamAutoShardingService( - Settings.builder() - .put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true) - .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of()) - .build(), + Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(), clusterService, new FeatureService(List.of(new FeatureSpecification() { @Override @@ -147,10 +144,7 @@ public Set<NodeFeature> getFeatures() { ClusterState stateNoFeature = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder()).build(); DataStreamAutoShardingService noFeatureService = new DataStreamAutoShardingService( - Settings.builder() - .put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true) - .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of()) - .build(), + Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(), clusterService, new FeatureService(List.of()), () -> now From abfb0ae7b3bb7ca2fcc426e7e1482ef8d042a8f9 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 4 Apr 2024 16:24:33 +0200 Subject: [PATCH 118/264] ESQL: Fix treating all fields as MV in COUNT pushdown (#106720) Fix a mistake in #106690 that accidentally prevented COUNT(field) from being pushed down in case field is single-valued. Add test to avoid future regressions.
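For context, a minimal sketch of the query shape this fix affects (it mirrors the new testCountPushdownForSvAndMvFields test added below; the index name `test` and the `salary` field come from that test's mapping):

FROM test
| STATS c = COUNT(salary)

When `salary` is mapped and provably single-valued, this count can be answered by a Lucene-level `exists` query (an EsStatsQueryExec leaf) instead of extracting field values; the inverted ternary in SearchStats#isSingleValue made every mapped field report as multi-valued, which disabled that rewrite.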
--- docs/changelog/106720.yaml | 5 ++ .../xpack/esql/qa/rest/RestEsqlTestCase.java | 9 +-- .../xpack/esql/EsqlTestUtils.java | 11 +++ .../xpack/esql/stats/SearchStats.java | 3 +- .../LocalPhysicalPlanOptimizerTests.java | 74 ++++++++++++++++++- 5 files changed, 93 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/106720.yaml diff --git a/docs/changelog/106720.yaml b/docs/changelog/106720.yaml new file mode 100644 index 0000000000000..93358ed1d3dff --- /dev/null +++ b/docs/changelog/106720.yaml @@ -0,0 +1,5 @@ +pr: 106720 +summary: "ESQL: Fix treating all fields as MV in COUNT pushdown" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 301b30df5647a..b67432f491cf3 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -30,6 +30,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.junit.After; import org.junit.Before; @@ -79,12 +80,8 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final String MAPPING_ALL_TYPES; static { - try (InputStream mappingPropertiesStream = RestEsqlTestCase.class.getResourceAsStream("/mapping-all-types.json")) { - String properties = new String(mappingPropertiesStream.readAllBytes(), StandardCharsets.UTF_8); - MAPPING_ALL_TYPES = "{\"mappings\": " + properties + "}"; - } catch (IOException ex) { - throw new RuntimeException(ex); - } + String properties = EsqlTestUtils.loadUtf8TextFile("/mapping-all-types.json"); + MAPPING_ALL_TYPES = "{\"mappings\": " + properties + "}"; } private static final String DOCUMENT_TEMPLATE = """ diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 8c5c79b98767e..fc8f80a19f09f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -34,6 +34,9 @@ import org.elasticsearch.xpack.ql.util.StringUtils; import org.junit.Assert; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -146,6 +149,14 @@ public static Map<String, EsField> loadMapping(String name) { return TypesTests.loadMapping(EsqlDataTypeRegistry.INSTANCE, name, true); } + public static String loadUtf8TextFile(String name) { + try (InputStream textStream = EsqlTestUtils.class.getResourceAsStream(name)) { + return new String(textStream.readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + public static EnrichResolution emptyPolicyResolution() { return new EnrichResolution(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index e8c547c55a373..57458c0574776 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -198,8 +198,9 @@ public boolean isSingleValue(String field) { // fields are MV per default var sv = new boolean[] { false }; for (SearchExecutionContext context : contexts) { - MappedFieldType mappedType = context.isFieldMapped(field) ? null : context.getFieldType(field); + MappedFieldType mappedType = context.isFieldMapped(field) ? context.getFieldType(field) : null; if (mappedType != null) { + sv[0] = true; doWithContexts(r -> { sv[0] &= detectSingleValue(r, mappedType, field); return sv[0]; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index cf387245a5968..80deb0ea83d86 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -9,12 +9,16 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperServiceTestCase; +import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; @@ -53,8 +57,10 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; +import org.elasticsearch.xpack.ql.util.Holder; import org.junit.Before; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -78,7 +84,7 @@ import static org.hamcrest.Matchers.nullValue; //@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") -public class LocalPhysicalPlanOptimizerTests extends ESTestCase { +public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase { private static final String PARAM_FORMATTING = "%1$s"; @@ -270,6 +276,70 @@ public void testCountOneFieldWithFilterAndLimit() { assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); } + public void testCountPushdownForSvAndMvFields() throws IOException { + String properties = EsqlTestUtils.loadUtf8TextFile("/mapping-basic.json"); + String mapping = "{\"mappings\": " + properties + "}"; + + String query = """ + from test + | stats c = count(salary) + """; + + PhysicalPlan plan; + + List<List<String>> docsCasesWithoutPushdown = List.of( + // No pushdown yet in case of MVs + List.of("{ \"salary\" : [1,2] }"), + List.of("{ \"salary\" : [1,2] }", "{ \"salary\" : null}") + ); + for (List<String> docs : docsCasesWithoutPushdown) { + plan = planWithMappingAndDocs(query, mapping, docs); + // No EsStatsQueryExec as leaf of the plan.
+ assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true)); + } + + // Cases where we can push this down as a COUNT(*) since there are only SVs + List<List<String>> docsCasesWithPushdown = List.of(List.of(), List.of("{ \"salary\" : 1 }"), List.of("{ \"salary\": null }")); + for (List<String> docs : docsCasesWithPushdown) { + plan = planWithMappingAndDocs(query, mapping, docs); + + Holder<EsStatsQueryExec> leaf = new Holder<>(); + plan.forEachDown(p -> { + if (p instanceof EsStatsQueryExec s) { + leaf.set(s); + } + }); + + String expectedStats = """ + [Stat[name=salary, type=COUNT, query={ + "exists" : { + "field" : "salary", + "boost" : 1.0 + } + }]]"""; + assertNotNull(leaf.get()); + assertThat(leaf.get().stats().toString(), equalTo(expectedStats)); + } + } + + private PhysicalPlan planWithMappingAndDocs(String query, String mapping, List<String> docs) throws IOException { + MapperService mapperService = createMapperService(mapping); + List<ParsedDocument> parsedDocs = docs.stream().map(d -> mapperService.documentMapper().parse(source(d))).toList(); + + Holder<PhysicalPlan> plan = new Holder<>(null); + withLuceneIndex(mapperService, indexWriter -> { + for (ParsedDocument parsedDoc : parsedDocs) { + indexWriter.addDocument(parsedDoc.rootDoc()); + } + }, directoryReader -> { + IndexSearcher searcher = newSearcher(directoryReader); + SearchExecutionContext ctx = createSearchExecutionContext(mapperService, searcher); + plan.set(plan(query, new SearchStats(List.of(ctx)))); + }); + + return plan.get(); + } + // optimized doesn't know yet how to break down different multi count public void testCountMultipleFieldsWithFilter() { var plan = plan(""" From edc9e6787a88991214c835b3995f941dc56ba28d Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 4 Apr 2024 16:28:07 +0200 Subject: [PATCH 119/264] ESQL: Fix fully pruned aggregates (#106673) Fix a bug where PruneColumns would sometimes completely replace an Aggregate, producing the wrong number of rows as a result.
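To make the failure mode concrete, here is a minimal query of the affected shape (it mirrors the emptyProjectInStatWithEval csv-spec case added below; `employees` is the standard test dataset):

FROM employees
| STATS c = COUNT(salary)
| EVAL x = 3.14
| DROP c

Once `c` is dropped, PruneColumns used to discard the Aggregate entirely and plan directly against its child, returning one row per employee instead of the single row the aggregation guarantees. The fix below substitutes a one-row LocalRelation when there are no groupings, or retains one grouping attribute when there are, so the row count is preserved.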
--- docs/changelog/106673.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 91 ++++++++++++++++++- .../esql/optimizer/LogicalPlanOptimizer.java | 18 +++- .../optimizer/LogicalPlanOptimizerTests.java | 72 +++++++++++++++ 4 files changed, 183 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/106673.yaml diff --git a/docs/changelog/106673.yaml b/docs/changelog/106673.yaml new file mode 100644 index 0000000000000..9a716d20ad2bc --- /dev/null +++ b/docs/changelog/106673.yaml @@ -0,0 +1,6 @@ +pr: 106673 +summary: "ESQL: Fix fully pruned aggregates" +area: ES|QL +type: bug +issues: + - 106427 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 91c79e64b2385..0a18568cf3c84 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1218,7 +1218,7 @@ c:l 891 ; -countMV#[skip:-8.13.99,reason:supported in 8.14] +countMV#[skip:-8.13.99,reason:fixed in 8.14] FROM employees | STATS vals = COUNT(salary_change.int) ; @@ -1227,6 +1227,95 @@ vals:l 183 ; +emptyProjectInStatWithEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) +| EVAL x = 3.14 +| DROP c +; + +x:d +3.14 +; + +emptyProjectInStatWithCountGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) BY gender +| EVAL x = 3.14 +| DROP c, gender +; + +x:d +3.14 +3.14 +3.14 +; + + +emptyProjectInStatWithMinGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS m = MIN(salary) BY gender +| EVAL x = 3.14 +| DROP m, gender +; + +x:d +3.14 +3.14 +3.14 +; + +emptyProjectInStatOnlyGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS BY gender +| EVAL x = 3.14 +| DROP gender +; + +x:d +3.14 +3.14 +3.14 +; + +emptyProjectInStatWithTwoGroupsAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) BY gender, still_hired +| EVAL x = 3.14 +| DROP c, gender, still_hired +; + +x:d +3.14 +3.14 +3.14 +3.14 +3.14 +3.14 +; + +emptyProjectInStatDueToAnotherStat#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS s = SUM(salary), m = MIN(salary) +| EVAL x = 3.14 +| STATS rows = COUNT(*) +; + +rows:l +1 +; + +emptyProjectInStatDueToAnotherStatWithGroups#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS m = MEDIAN(salary) BY gender, still_hired +| EVAL x = 3.14 +| STATS rows = COUNT(*) +; + +rows:l +6 +; + sumOfConst#[skip:-8.13.99,reason:supported in 8.14] FROM employees | STATS s1 = sum(1), s2point1 = sum(2.1), s_mv = sum([-1, 0, 3]) * 3, s_null = sum(null), rows = count(*) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index ec3ff07a9867f..fe2a3076380df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -1037,11 +1037,23 @@ public LogicalPlan apply(LogicalPlan plan) { recheck = false; if (p instanceof Aggregate aggregate) { var remaining = seenProjection.get() ? 
removeUnused(aggregate.aggregates(), used) : null; - // no aggregates, no need + if (remaining != null) { if (remaining.isEmpty()) { - recheck = true; - p = aggregate.child(); + // We still need to have a plan that produces 1 row per group. + if (aggregate.groupings().isEmpty()) { + p = new LocalRelation( + aggregate.source(), + List.of(new EmptyAttribute(aggregate.source())), + LocalSupplier.of( + new Block[] { BlockUtils.constantBlock(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, null, 1) } + ) + ); + } else { + // Aggs cannot produce pages with 0 columns, so retain one grouping. + remaining = List.of(Expressions.attribute(aggregate.groupings().get(0))); + p = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), remaining); + } } else { p = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), remaining); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 3f0b39603ef89..050ee2caefec0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -238,6 +238,78 @@ public void testEmptyProjectionInStat() { assertThat(relation.supplier().get(), emptyArray()); } + /** + * Expects + * + * EsqlProject[[x{r}#6]] + * \_Eval[[1[INTEGER] AS x]] + * \_Limit[1000[INTEGER]] + * \_LocalRelation[[{e}#18],[ConstantNullBlock[positions=1]]] + */ + public void testEmptyProjectInStatWithEval() { + var plan = plan(""" + from test + | where languages > 1 + | stats c = count(salary) + | eval x = 1, c2 = c*2 + | drop c, c2 + """); + + var project = as(plan, Project.class); + var eval = as(project.child(), Eval.class); + var limit = as(eval.child(), Limit.class); + var singleRowRelation = as(limit.child(), LocalRelation.class); + var singleRow = singleRowRelation.supplier().get(); + assertThat(singleRow.length, equalTo(1)); + assertThat(singleRow[0].getPositionCount(), equalTo(1)); + + var exprs = eval.fields(); + assertThat(exprs.size(), equalTo(1)); + var alias = as(exprs.get(0), Alias.class); + assertThat(alias.name(), equalTo("x")); + assertThat(alias.child().fold(), equalTo(1)); + } + + /** + * Expects + * + * EsqlProject[[x{r}#8]] + * \_Eval[[1[INTEGER] AS x]] + * \_Limit[1000[INTEGER]] + * \_Aggregate[[emp_no{f}#15],[emp_no{f}#15]] + * \_Filter[languages{f}#18 > 1[INTEGER]] + * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+ */ + public void testEmptyProjectInStatWithGroupAndEval() { + var plan = plan(""" + from test + | where languages > 1 + | stats c = count(salary) by emp_no + | eval x = 1, c2 = c*2 + | drop c, emp_no, c2 + """); + + var project = as(plan, Project.class); + var eval = as(project.child(), Eval.class); + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var filter = as(agg.child(), Filter.class); + var relation = as(filter.child(), EsRelation.class); + + assertThat(Expressions.names(agg.groupings()), contains("emp_no")); + assertThat(Expressions.names(agg.aggregates()), contains("emp_no")); + + var exprs = eval.fields(); + assertThat(exprs.size(), equalTo(1)); + var alias = as(exprs.get(0), Alias.class); + assertThat(alias.name(), equalTo("x")); + assertThat(alias.child().fold(), equalTo(1)); + + var filterCondition = as(filter.condition(), GreaterThan.class); + assertThat(Expressions.name(filterCondition.left()), equalTo("languages")); + assertThat(filterCondition.right().fold(), equalTo(1)); + } + public void testCombineProjections() { var plan = plan(""" from test From 8a1df9be2d7cef6d82f45372956d264753ccaa17 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Thu, 4 Apr 2024 10:44:14 -0400 Subject: [PATCH 120/264] [DOCS] fix time zone logic example (#106962) * [DOCS] fix time zone logic example * specify standard time * goodbye e.g. --- .../bucket/datehistogram-aggregation.asciidoc | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 26774c7091d27..3511ec9e63b02 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -341,23 +341,24 @@ Response: rounding is also done in UTC. Use the `time_zone` parameter to indicate that bucketing should use a different time zone. -For example, if the interval is a calendar day and the time zone is -`America/New_York` then `2020-01-03T01:00:01Z` is : -# Converted to `2020-01-02T18:00:01` -# Rounded down to `2020-01-02T00:00:00` -# Then converted back to UTC to produce `2020-01-02T05:00:00:00Z` -# Finally, when the bucket is turned into a string key it is printed in - `America/New_York` so it'll display as `"2020-01-02T00:00:00"`. -It looks like: +When you specify a time zone, the following logic is used to determine the bucket the document belongs in: [source,java] ---- bucket_key = localToUtc(Math.floor(utcToLocal(value) / interval) * interval) ---- +For example, if the interval is a calendar day and the time zone is +`America/New_York`, then the date value `2020-01-03T01:00:01Z` is processed as follows: +. Converted to EST: `2020-01-02T20:00:01` +. Rounded down to the nearest interval: `2020-01-02T00:00:00` +. Converted back to UTC: `2020-01-02T05:00:00Z` +When a `key_as_string` is generated for the bucket, the key value is stored in `America/New_York` time, so it'll display as `"2020-01-02T00:00:00"`. +You can specify time zones as an ISO 8601 UTC offset, such as `+01:00` or +`-08:00`, or as an IANA time zone ID, such as `America/Los_Angeles`.
Consider the following example: @@ -618,7 +619,7 @@ For example, for `+50d` we see: -------------------------------------------------- // TESTRESPONSE[skip:no setup made for this example yet] -It is therefor always important when using `offset` with `calendar_interval` bucket sizes +It is therefore always important when using `offset` with `calendar_interval` bucket sizes to understand the consequences of using offsets larger than the interval size. More examples: @@ -633,7 +634,7 @@ but as soon as you push the start date into the second month by having an offset quarters will all start on different dates. [[date-histogram-keyed-response]] -==== Keyed Response +==== Keyed response Setting the `keyed` flag to `true` associates a unique string key with each bucket and returns the ranges as a hash rather than an array: From 46ec6362b5a87a85f5a2461a42b3383043e9ad07 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 4 Apr 2024 16:45:06 +0200 Subject: [PATCH 121/264] [Profiling] Add TopN Functions API (#106860) With this commit we add a new API to the Universal Profiling plugin that allows gathering a list of functions with the most observed samples (TopN functions). --------- Co-authored-by: Joseph Crail --- docs/changelog/106860.yaml | 5 + .../api/profiling.topn_functions.json | 28 ++ .../profiling/GetFlameGraphActionIT.java | 16 +- .../profiling/GetStackTracesActionIT.java | 91 +++++- .../profiling/GetTopNFunctionsActionIT.java | 81 +++++ .../data/profiling-events-all.ndjson | 4 +- .../profiling/GetStackTracesRequest.java | 34 +- .../GetStackTracesResponseBuilder.java | 3 + .../profiling/GetTopNFunctionsAction.java | 18 ++ .../profiling/GetTopNFunctionsResponse.java | 56 ++++ .../xpack/profiling/ProfilingPlugin.java | 2 + .../profiling/RestGetTopNFunctionsAction.java | 46 +++ .../xpack/profiling/StackFrame.java | 1 + .../xpack/profiling/StackTrace.java | 6 +- .../xpack/profiling/TopNFunction.java | 297 ++++++++++++++++++ .../xpack/profiling/TraceEvent.java | 9 +- .../TransportGetFlamegraphAction.java | 4 +- .../TransportGetStackTracesAction.java | 72 ++++- .../TransportGetTopNFunctionsAction.java | 162 ++++++++++ .../profiling/GetStackTracesRequestTests.java | 21 +- .../xpack/profiling/ResamplerTests.java | 5 + .../xpack/profiling/StackFrameTests.java | 1 - .../xpack/profiling/TopNFunctionTests.java | 117 +++++++ .../TransportGetFlamegraphActionTests.java | 1 - .../TransportGetTopNFunctionsActionTests.java | 183 +++++++++++ .../xpack/security/operator/Constants.java | 1 + .../rest-api-spec/test/profiling/10_basic.yml | 64 ++++ 27 files changed, 1299 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/106860.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json create mode 100644 x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java create mode 100644
x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java create mode 100644 x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java diff --git a/docs/changelog/106860.yaml b/docs/changelog/106860.yaml new file mode 100644 index 0000000000000..376f8753023b9 --- /dev/null +++ b/docs/changelog/106860.yaml @@ -0,0 +1,5 @@ +pr: 106860 +summary: "[Profiling] Add TopN Functions API" +area: Application +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json new file mode 100644 index 0000000000000..3b4db3abf2cca --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json @@ -0,0 +1,28 @@ +{ + "profiling.topn_functions":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", + "description":"Extracts a list of topN functions from Universal Profiling." + }, + "stability":"stable", + "visibility":"private", + "headers":{ + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_profiling/topn/functions", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The filter conditions for stacktraces", + "required":true + } + } +} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java index 855c0c200aaba..20519d53459ba 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java @@ -9,7 +9,21 @@ public class GetFlameGraphActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { - GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); GetFlamegraphResponse response = client().execute(GetFlamegraphAction.INSTANCE, request).get(); // only spot-check top level properties - detailed tests are done in unit tests assertEquals(994, response.getSize()); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 62b8242e7df86..30de2173e8903 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -15,7 +15,65 @@ public class GetStackTracesActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { - GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, 
+ null, + null, + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); + assertEquals(46, response.getTotalSamples()); + assertEquals(1821, response.getTotalFrames()); + + assertNotNull(response.getStackTraceEvents()); + assertEquals(3L, response.getStackTraceEvents().get("L7kj7UvlKbT-vN73el4faQ").count); + + assertNotNull(response.getStackTraces()); + // just do a high-level spot check. Decoding is tested in unit-tests + StackTrace stackTrace = response.getStackTraces().get("L7kj7UvlKbT-vN73el4faQ"); + assertEquals(18, stackTrace.addressOrLines.length); + assertEquals(18, stackTrace.fileIds.length); + assertEquals(18, stackTrace.frameIds.length); + assertEquals(18, stackTrace.typeIds.length); + assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); + assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + // not determined by default + assertNull(stackTrace.subGroups); + + assertNotNull(response.getStackFrames()); + StackFrame stackFrame = response.getStackFrames().get("8NlMClggx8jaziUTJXlmWAAAAAAAAIYI"); + assertEquals(List.of("start_thread"), stackFrame.functionName); + + assertNotNull(response.getExecutables()); + assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); + } + + public void testGetStackTracesGroupedByServiceName() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + "service.name", + null, + null, + null, + null, + null + ); request.setAdjustSampleCount(true); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); assertEquals(46, response.getTotalSamples()); @@ -33,6 +91,7 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals(18, stackTrace.typeIds.length); assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + assertEquals(Long.valueOf(2L), stackTrace.subGroups.get("basket")); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("8NlMClggx8jaziUTJXlmWAAAAAAAAIYI"); @@ -42,6 +101,28 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); } + public void testGetStackTracesGroupedByInvalidField() { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + // only service.name is supported (note the trailing "s") + "service.names", + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, client().execute(GetStackTracesAction.INSTANCE, request)); + assertEquals("Requested custom event aggregation field [service.names] but only [service.name] is supported.", e.getMessage()); + } + public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception { BoolQueryBuilder query = QueryBuilders.boolQuery(); query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); @@ -56,6 +137,7 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception // also match an index that does not contain stacktrace ids to ensure it is ignored new String[] { "apm-test-*", "apm-legacy-test-*" }, "transaction.profiler_stack_trace_ids", + 
"transaction.name", null, null, null, @@ -79,6 +161,7 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); + assertEquals(Long.valueOf(3L), stackTrace.subGroups.get("encodeSha1")); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); @@ -103,6 +186,7 @@ public void testGetStackTracesFromAPMWithMatchAndDownsampling() throws Exception null, null, null, + null, null ); // ensures consistent results in the random sampler aggregation that is used internally @@ -126,6 +210,8 @@ public void testGetStackTracesFromAPMWithMatchAndDownsampling() throws Exception assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); + // not determined by default + assertNull(stackTrace.subGroups); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); @@ -150,6 +236,7 @@ public void testGetStackTracesFromAPMNoMatch() throws Exception { null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); @@ -171,6 +258,7 @@ public void testGetStackTracesFromAPMIndexNotAvailable() throws Exception { null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); @@ -192,6 +280,7 @@ public void testGetStackTracesFromAPMStackTraceFieldNotAvailable() throws Except null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java new file mode 100644 index 0000000000000..05d0e1cb0471b --- /dev/null +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +public class GetTopNFunctionsActionIT extends ProfilingTestCase { + public void testGetTopNFunctionsUnfiltered() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(747, response.getTopN().size()); + } + + public void testGetTopNFunctionsGroupedByServiceName() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + "service.name", + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + request.setLimit(50); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(50, response.getTopN().size()); + } + + public void testGetTopNFunctionsFromAPM() throws Exception { + BoolQueryBuilder query = QueryBuilders.boolQuery(); + query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); + query.must().add(QueryBuilders.rangeQuery("@timestamp").lte("1698624000")); + + GetStackTracesRequest request = new GetStackTracesRequest( + null, + 1.0d, + 1.0d, + 1.0d, + query, + // also match an index that does not contain stacktrace ids to ensure it is ignored + new String[] { "apm-test-*", "apm-legacy-test-*" }, + "transaction.profiler_stack_trace_ids", + "transaction.name", + null, + null, + null, + null, + null + ); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(45, response.getTopN().size()); + } +} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson index 502494f05c62c..7211ad54cbcd1 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson @@ -71,9 +71,9 @@ {"create": {"_index": "profiling-events-all"}} {"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["XF9MchOwpePfa6_hYy-vZQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} {"create": {"_index": "profiling-events-all"}} -{"Stacktrace.count": [2], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} +{"Stacktrace.count": [2], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": 
["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"], "service.name": "basket"} {"create": {"_index": "profiling-events-all"}} -{"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} +{"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"], "service.name": "basket"} {"create": {"_index": "profiling-events-all"}} {"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["hRqQI2CBPiapzgFG9jrmDA"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["599103450330106"]} {"create": {"_index": "profiling-events-all"}} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index 86ed038467191..038a576cd77fc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -38,8 +38,10 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesRequest.Replaceable { public static final ParseField QUERY_FIELD = new ParseField("query"); public static final ParseField SAMPLE_SIZE_FIELD = new ParseField("sample_size"); + public static final ParseField LIMIT_FIELD = new ParseField("limit"); public static final ParseField INDICES_FIELD = new ParseField("indices"); public static final ParseField STACKTRACE_IDS_FIELD = new ParseField("stacktrace_ids_field"); + public static final ParseField AGGREGATION_FIELD = new ParseField("aggregation_field"); public static final ParseField REQUESTED_DURATION_FIELD = new ParseField("requested_duration"); public static final ParseField AWS_COST_FACTOR_FIELD = new ParseField("aws_cost_factor"); public static final ParseField AZURE_COST_FACTOR_FIELD = new ParseField("azure_cost_factor"); @@ -52,9 +54,11 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesReque private QueryBuilder query; private int sampleSize; + private Integer limit; private String[] indices; private boolean userProvidedIndices; private String stackTraceIdsField; + private String 
aggregationField; private Double requestedDuration; private Double awsCostFactor; private Double azureCostFactor; @@ -73,7 +77,7 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesReque private Integer shardSeed; public GetStackTracesRequest() { - this(null, null, null, null, null, null, null, null, null, null, null, null); + this(null, null, null, null, null, null, null, null, null, null, null, null, null); } public GetStackTracesRequest( @@ -84,6 +88,7 @@ public GetStackTracesRequest( QueryBuilder query, String[] indices, String stackTraceIdsField, + String aggregationField, Double customCO2PerKWH, Double customDatacenterPUE, Double customPerCoreWattX86, @@ -98,6 +103,7 @@ public GetStackTracesRequest( this.indices = indices; this.userProvidedIndices = indices != null && indices.length > 0; this.stackTraceIdsField = stackTraceIdsField; + this.aggregationField = aggregationField; this.customCO2PerKWH = customCO2PerKWH; this.customDatacenterPUE = customDatacenterPUE; this.customPerCoreWattX86 = customPerCoreWattX86; @@ -114,6 +120,14 @@ public int getSampleSize() { return sampleSize; } + public void setLimit(int limit) { + this.limit = limit; + } + + public Integer getLimit() { + return limit; + } + public Double getRequestedDuration() { return requestedDuration; } @@ -162,6 +176,10 @@ public String getStackTraceIdsField() { return stackTraceIdsField; } + public String getAggregationField() { + return aggregationField; + } + public boolean isAdjustSampleCount() { return Boolean.TRUE.equals(adjustSampleCount); } @@ -194,8 +212,12 @@ public void parseXContent(XContentParser parser) throws IOException { } else if (token.isValue()) { if (SAMPLE_SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.sampleSize = parser.intValue(); + } else if (LIMIT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.limit = parser.intValue(); } else if (STACKTRACE_IDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.stackTraceIdsField = parser.text(); + } else if (AGGREGATION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.aggregationField = parser.text(); } else if (REQUESTED_DURATION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.requestedDuration = parser.doubleValue(); } else if (AWS_COST_FACTOR_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -277,7 +299,15 @@ public ActionRequestValidationException validate() { ); } } + if (aggregationField != null && aggregationField.isBlank()) { + validationException = addValidationError( + "[" + AGGREGATION_FIELD.getPreferredName() + "] must be non-empty", + validationException + ); + } + validationException = requirePositive(SAMPLE_SIZE_FIELD, sampleSize, validationException); + validationException = requirePositive(LIMIT_FIELD, limit, validationException); validationException = requirePositive(REQUESTED_DURATION_FIELD, requestedDuration, validationException); validationException = requirePositive(AWS_COST_FACTOR_FIELD, awsCostFactor, validationException); validationException = requirePositive(AZURE_COST_FACTOR_FIELD, azureCostFactor, validationException); @@ -307,7 +337,9 @@ public String getDescription() { StringBuilder sb = new StringBuilder(); appendField(sb, "indices", indices); appendField(sb, "stacktrace_ids_field", stackTraceIdsField); + appendField(sb, "aggregation_field", aggregationField); appendField(sb, "sample_size", sampleSize); + appendField(sb, "limit", limit); appendField(sb, "requested_duration", 
requestedDuration); appendField(sb, "aws_cost_factor", awsCostFactor); appendField(sb, "azure_cost_factor", azureCostFactor); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java index f058341b80b37..44c9c987fc6c7 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java @@ -155,6 +155,9 @@ public GetStackTracesResponse build() { if (event != null) { StackTrace stackTrace = entry.getValue(); stackTrace.count = event.count; + if (event.subGroups.isEmpty() == false) { + stackTrace.subGroups = event.subGroups; + } stackTrace.annualCO2Tons = event.annualCO2Tons; stackTrace.annualCostsUSD = event.annualCostsUSD; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java new file mode 100644 index 0000000000000..b11e74cbbf93d --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.action.ActionType; + +public final class GetTopNFunctionsAction extends ActionType<GetTopNFunctionsResponse> { + public static final GetTopNFunctionsAction INSTANCE = new GetTopNFunctionsAction(); + public static final String NAME = "indices:data/read/profiling/topn/functions"; + + private GetTopNFunctionsAction() { + super(NAME); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java new file mode 100644 index 0000000000000..b8785bc607b18 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class GetTopNFunctionsResponse extends ActionResponse implements ToXContentObject { + private final long selfCount; + private final long totalCount; + private final List<TopNFunction> topNFunctions; + + public GetTopNFunctionsResponse(long selfCount, long totalCount, List<TopNFunction> topNFunctions) { + this.selfCount = selfCount; + this.totalCount = totalCount; + this.topNFunctions = topNFunctions; + } + + @Override + public void writeTo(StreamOutput out) { + TransportAction.localOnly(); + } + + public long getSelfCount() { + return selfCount; + } + + public long getTotalCount() { + return totalCount; + } + + public List<TopNFunction> getTopN() { + return topNFunctions; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("self_count", selfCount); + builder.field("total_count", totalCount); + builder.xContentList("topn", topNFunctions); + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index 400ddfdbf73b6..0615bef7a4980 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -134,6 +134,7 @@ public List<RestHandler> getRestHandlers( if (enabled) { handlers.add(new RestGetStackTracesAction()); handlers.add(new RestGetFlamegraphAction()); + handlers.add(new RestGetTopNFunctionsAction()); } return Collections.unmodifiableList(handlers); } @@ -168,6 +169,7 @@ public static ExecutorBuilder<?> responseExecutorBuilder() { return List.of( new ActionHandler<>(GetStackTracesAction.INSTANCE, TransportGetStackTracesAction.class), new ActionHandler<>(GetFlamegraphAction.INSTANCE, TransportGetFlamegraphAction.class), + new ActionHandler<>(GetTopNFunctionsAction.INSTANCE, TransportGetTopNFunctionsAction.class), new ActionHandler<>(GetStatusAction.INSTANCE, TransportGetStatusAction.class), new ActionHandler<>(XPackUsageFeatureAction.UNIVERSAL_PROFILING, ProfilingUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.UNIVERSAL_PROFILING, ProfilingInfoTransportAction.class) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java new file mode 100644 index 0000000000000..b9896418d7b79 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +@ServerlessScope(Scope.PUBLIC) +public class RestGetTopNFunctionsAction extends BaseRestHandler { + @Override + public List<Route> routes() { + return List.of(new Route(POST, "/_profiling/topn/functions")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + GetStackTracesRequest getStackTracesRequest = new GetStackTracesRequest(); + request.applyContentParser(getStackTracesRequest::parseXContent); + // enforce server-side adjustment of sample counts for top N functions + getStackTracesRequest.setAdjustSampleCount(true); + + return channel -> { + RestCancellableNodeClient cancelClient = new RestCancellableNodeClient(client, request.getHttpChannel()); + cancelClient.execute(GetTopNFunctionsAction.INSTANCE, getStackTracesRequest, new RestToXContentListener<>(channel)); + }; + } + + @Override + public String getName() { + return "get_topn_functions_action"; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java index 35f5899536745..5f7102c63d3d7 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java @@ -18,6 +18,7 @@ import java.util.function.Consumer; final class StackFrame implements ToXContentObject { + static final StackFrame EMPTY_STACKFRAME = new StackFrame("", "", 0, 0); List<String> fileName; List<String> functionName; List<Integer> functionOffset; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java index b039bf22110b1..d24127824dafd 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java @@ -26,7 +26,7 @@ final class StackTrace implements ToXContentObject { String[] fileIds; String[] frameIds; int[] typeIds; - + Map<String, Long> subGroups; double annualCO2Tons; double annualCostsUSD; long count; @@ -247,10 +247,10 @@ public boolean equals(Object o) { && Arrays.equals(fileIds, that.fileIds) && Arrays.equals(frameIds, that.frameIds) && Arrays.equals(typeIds, that.typeIds); - // Don't compare metadata like annualized co2, annualized costs and count. + // Don't compare metadata like annualized co2, annualized costs, subGroups and count. } - // Don't hash metadata like annualized co2, annualized costs and count. + // Don't hash metadata like annualized co2, annualized costs, subGroups and count.
@Override public int hashCode() { int result = Arrays.hashCode(addressOrLines); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java new file mode 100644 index 0000000000000..777d8e247335c --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +final class TopNFunction implements Cloneable, ToXContentObject, Comparable<TopNFunction> { + private final String id; + private int rank; + private final int frameType; + private final boolean inline; + private final int addressOrLine; + private final String functionName; + private final String sourceFilename; + private final int sourceLine; + private final String exeFilename; + private long selfCount; + private long totalCount; + private double selfAnnualCO2Tons; + private double totalAnnualCO2Tons; + private double selfAnnualCostsUSD; + private double totalAnnualCostsUSD; + private final Map<String, Long> subGroups; + + TopNFunction( + String id, + int frameType, + boolean inline, + int addressOrLine, + String functionName, + String sourceFilename, + int sourceLine, + String exeFilename + ) { + this( + id, + 0, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 0, + 0, + 0.0d, + 0.0d, + 0.0d, + 0.0d, + new HashMap<>() + ); + } + + TopNFunction( + String id, + int rank, + int frameType, + boolean inline, + int addressOrLine, + String functionName, + String sourceFilename, + int sourceLine, + String exeFilename, + long selfCount, + long totalCount, + double selfAnnualCO2Tons, + double totalAnnualCO2Tons, + double selfAnnualCostsUSD, + double totalAnnualCostsUSD, + Map<String, Long> subGroups + ) { + this.id = id; + this.rank = rank; + this.frameType = frameType; + this.inline = inline; + this.addressOrLine = addressOrLine; + this.functionName = functionName; + this.sourceFilename = sourceFilename; + this.sourceLine = sourceLine; + this.exeFilename = exeFilename; + this.selfCount = selfCount; + this.totalCount = totalCount; + this.selfAnnualCO2Tons = selfAnnualCO2Tons; + this.totalAnnualCO2Tons = totalAnnualCO2Tons; + this.selfAnnualCostsUSD = selfAnnualCostsUSD; + this.totalAnnualCostsUSD = totalAnnualCostsUSD; + this.subGroups = subGroups; + } + + public String getId() { + return this.id; + } + + public void setRank(int rank) { + this.rank = rank; + } + + public long getSelfCount() { + return selfCount; + } + + public void addSelfCount(long selfCount) { + this.selfCount += selfCount; + } + + public long getTotalCount() { + return totalCount; + } + + public void addTotalCount(long totalCount) { + this.totalCount += totalCount; + } + + public void addSelfAnnualCO2Tons(double co2Tons) { + this.selfAnnualCO2Tons += co2Tons; + } + + public void addTotalAnnualCO2Tons(double co2Tons) { + this.totalAnnualCO2Tons += co2Tons; + } + + public void addSelfAnnualCostsUSD(double costs) { + this.selfAnnualCostsUSD += costs; + }
+ + public void addTotalAnnualCostsUSD(double costs) { + this.totalAnnualCostsUSD += costs; + } + + public void addSubGroups(Map<String, Long> subGroups) { + for (Map.Entry<String, Long> subGroup : subGroups.entrySet()) { + long count = this.subGroups.getOrDefault(subGroup.getKey(), 0L); + this.subGroups.put(subGroup.getKey(), count + subGroup.getValue()); + } + } + + @Override + protected TopNFunction clone() { + return new TopNFunction( + id, + rank, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + selfCount, + totalCount, + selfAnnualCO2Tons, + totalAnnualCO2Tons, + selfAnnualCostsUSD, + totalAnnualCostsUSD, + new HashMap<>(subGroups) + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("id", this.id); + builder.field("rank", this.rank); + builder.startObject("frame"); + builder.field("frame_type", this.frameType); + builder.field("inline", this.inline); + builder.field("address_or_line", this.addressOrLine); + builder.field("function_name", this.functionName); + builder.field("file_name", this.sourceFilename); + builder.field("line_number", this.sourceLine); + builder.field("executable_file_name", this.exeFilename); + builder.endObject(); + builder.field("sub_groups", subGroups); + builder.field("self_count", this.selfCount); + builder.field("total_count", this.totalCount); + builder.field("self_annual_co2_tons").rawValue(NumberUtils.doubleToString(selfAnnualCO2Tons)); + builder.field("total_annual_co2_tons").rawValue(NumberUtils.doubleToString(totalAnnualCO2Tons)); + builder.field("self_annual_costs_usd").rawValue(NumberUtils.doubleToString(selfAnnualCostsUSD)); + builder.field("total_annual_costs_usd").rawValue(NumberUtils.doubleToString(totalAnnualCostsUSD)); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TopNFunction that = (TopNFunction) o; + return Objects.equals(id, that.id) + && Objects.equals(rank, that.rank) + && Objects.equals(frameType, that.frameType) + && Objects.equals(inline, that.inline) + && Objects.equals(addressOrLine, that.addressOrLine) + && Objects.equals(functionName, that.functionName) + && Objects.equals(sourceFilename, that.sourceFilename) + && Objects.equals(sourceLine, that.sourceLine) + && Objects.equals(exeFilename, that.exeFilename) + && Objects.equals(selfCount, that.selfCount) + && Objects.equals(totalCount, that.totalCount) + && Objects.equals(selfAnnualCO2Tons, that.selfAnnualCO2Tons) + && Objects.equals(totalAnnualCO2Tons, that.totalAnnualCO2Tons) + && Objects.equals(selfAnnualCostsUSD, that.selfAnnualCostsUSD) + && Objects.equals(totalAnnualCostsUSD, that.totalAnnualCostsUSD) + && Objects.equals(subGroups, that.subGroups); + } + + @Override + public int hashCode() { + return Objects.hash( + id, + rank, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + selfCount, + totalCount, + selfAnnualCO2Tons, + totalAnnualCO2Tons, + selfAnnualCostsUSD, + totalAnnualCostsUSD, + subGroups + ); + } + + @Override + public String toString() { + return "TopNFunction{" + + "id='" + + id + + '\'' + + ", rank=" + + rank + + ", frameType=" + + frameType + + ", inline=" + + inline + + ", addressOrLine=" + + addressOrLine + + ", functionName='" + + functionName + + '\'' + + ", sourceFilename='" + + sourceFilename + + '\'' + + ",
sourceLine=" + + sourceLine + + ", exeFilename='" + + exeFilename + + '\'' + + ", selfCount=" + + selfCount + + ", totalCount=" + + totalCount + + ", selfAnnualCO2Tons=" + + selfAnnualCO2Tons + + ", totalAnnualCO2Tons=" + + totalAnnualCO2Tons + + ", selfAnnualCostsUSD=" + + selfAnnualCostsUSD + + ", totalAnnualCostsUSD=" + + totalAnnualCostsUSD + + ", subGroups=" + + subGroups + + '}'; + } + + @Override + public int compareTo(TopNFunction that) { + if (this.selfCount > that.selfCount) { + return 1; + } + if (this.selfCount < that.selfCount) { + return -1; + } + return this.id.compareTo(that.id); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java index d092868e23cd9..adb88848a418e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.profiling; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; final class TraceEvent { @@ -14,9 +16,10 @@ final class TraceEvent { double annualCO2Tons; double annualCostsUSD; long count; + final Map<String, Long> subGroups = new HashMap<>(); TraceEvent(String stacktraceID) { - this.stacktraceID = stacktraceID; + this(stacktraceID, 0); } TraceEvent(String stacktraceID, long count) { @@ -53,6 +56,8 @@ public String toString() { + annualCostsUSD + ", count=" + count - + "}"; + + ", subGroups=" + + subGroups + + '}'; } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index 39b73db41aeef..7a25319d3a1cc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -28,8 +28,6 @@ public class TransportGetFlamegraphAction extends TransportAction<GetStackTracesRequest, GetFlamegraphResponse> { private static final Logger log = LogManager.getLogger(TransportGetFlamegraphAction.class); - private static final StackFrame EMPTY_STACKFRAME = new StackFrame("", "", 0, 0); - private final NodeClient nodeClient; private final TransportService transportService; @@ -97,7 +95,7 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { String fileId = stackTrace.fileIds[i]; int frameType = stackTrace.typeIds[i]; int addressOrLine = stackTrace.addressOrLines[i]; - StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, EMPTY_STACKFRAME); + StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, StackFrame.EMPTY_STACKFRAME); String executable = response.getExecutables().getOrDefault(fileId, ""); final boolean isLeafFrame = i == frameCount - 1; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 8fff0dab53b08..0acdc7c37ce09 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -108,6 +112,12 @@ public class
TransportGetStackTracesAction extends TransportAction<GetStackTracesRequest, GetStackTracesResponse> { @@ -370,6 +408,14 @@ The same stacktraces may come from different hosts (eventually from different da stackTraceEvents.put(stackTraceID, event); } event.count += finalCount; + if (request.getAggregationField() != null) { + Terms subGroup = stacktraceBucket.getAggregations().get(CUSTOM_EVENT_SUB_AGGREGATION_NAME); + for (Terms.Bucket b : subGroup.getBuckets()) { + String subGroupName = b.getKeyAsString(); + long subGroupCount = event.subGroups.getOrDefault(subGroupName, 0L); + event.subGroups.put(subGroupName, subGroupCount + b.getDocCount()); + } + } } } responseBuilder.setTotalSamples(totalFinalCount); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java new file mode 100644 index 0000000000000..cb5f2da6c3731 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class TransportGetTopNFunctionsAction extends TransportAction<GetStackTracesRequest, GetTopNFunctionsResponse> { + private static final Logger log = LogManager.getLogger(TransportGetTopNFunctionsAction.class); + private final NodeClient nodeClient; + private final TransportService transportService; + + @Inject + public TransportGetTopNFunctionsAction(NodeClient nodeClient, TransportService transportService, ActionFilters actionFilters) { + super(GetTopNFunctionsAction.NAME, actionFilters, transportService.getTaskManager()); + this.nodeClient = nodeClient; + this.transportService = transportService; + } + + @Override + protected void doExecute(Task task, GetStackTracesRequest request, ActionListener<GetTopNFunctionsResponse> listener) { + Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), task); + StopWatch watch = new StopWatch("getTopNFunctionsAction"); + client.execute(GetStackTracesAction.INSTANCE, request, ActionListener.wrap(searchResponse -> { + StopWatch processingWatch = new StopWatch("Processing response"); + GetTopNFunctionsResponse topNFunctionsResponse = buildTopNFunctions(searchResponse, request.getLimit()); + log.debug(() -> watch.report() + " " + processingWatch.report()); + listener.onResponse(topNFunctionsResponse); + }, listener::onFailure)); + } + + static GetTopNFunctionsResponse buildTopNFunctions(GetStackTracesResponse response, Integer limit) { + TopNFunctionsBuilder builder = new
TopNFunctionsBuilder(limit); + if (response.getTotalFrames() == 0) { + return builder.build(); + } + + for (StackTrace stackTrace : response.getStackTraces().values()) { + Set<String> frameGroupsPerStackTrace = new HashSet<>(); + long samples = stackTrace.count; + double annualCO2Tons = stackTrace.annualCO2Tons; + double annualCostsUSD = stackTrace.annualCostsUSD; + + int frameCount = stackTrace.frameIds.length; + for (int i = 0; i < frameCount; i++) { + String frameId = stackTrace.frameIds[i]; + String fileId = stackTrace.fileIds[i]; + int frameType = stackTrace.typeIds[i]; + int addressOrLine = stackTrace.addressOrLines[i]; + StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, StackFrame.EMPTY_STACKFRAME); + String executable = response.getExecutables().getOrDefault(fileId, ""); + + final boolean isLeafFrame = i == frameCount - 1; + stackFrame.forEach(frame -> { + // The samples associated with a frame provide the total number of + // traces in which that frame has appeared at least once. However, a + // frame may appear multiple times in a trace, and thus to avoid + // counting it multiple times we need to record the frames seen so + // far in each trace. Instead of using the entire frame information + // to determine if a frame has already been seen within a given + // stacktrace, we use the frame group ID for a frame. + String frameGroupId = FrameGroupID.create(fileId, addressOrLine, executable, frame.fileName(), frame.functionName()); + if (builder.isExists(frameGroupId) == false) { + builder.addTopNFunction( + new TopNFunction( + frameGroupId, + frameType, + frame.inline(), + addressOrLine, + frame.functionName(), + frame.fileName(), + frame.lineNumber(), + executable + ) + ); + } + TopNFunction current = builder.getTopNFunction(frameGroupId); + if (stackTrace.subGroups != null) { + current.addSubGroups(stackTrace.subGroups); + } + if (frameGroupsPerStackTrace.contains(frameGroupId) == false) { + frameGroupsPerStackTrace.add(frameGroupId); + current.addTotalCount(samples); + current.addTotalAnnualCO2Tons(annualCO2Tons); + current.addTotalAnnualCostsUSD(annualCostsUSD); + + } + if (isLeafFrame && frame.last()) { + // Leaf frame: sum up counts for self CPU.
+ current.addSelfCount(samples); + current.addSelfAnnualCO2Tons(annualCO2Tons); + current.addSelfAnnualCostsUSD(annualCostsUSD); + + } + }); + } + } + + return builder.build(); + } + + private static class TopNFunctionsBuilder { + private final Integer limit; + private final HashMap<String, TopNFunction> topNFunctions; + + TopNFunctionsBuilder(Integer limit) { + this.limit = limit; + this.topNFunctions = new HashMap<>(); + } + + public GetTopNFunctionsResponse build() { + List<TopNFunction> functions = new ArrayList<>(topNFunctions.values()); + functions.sort(Collections.reverseOrder()); + long sumSelfCount = 0; + long sumTotalCount = 0; + for (int i = 0; i < functions.size(); i++) { + TopNFunction topNFunction = functions.get(i); + topNFunction.setRank(i + 1); + sumSelfCount += topNFunction.getSelfCount(); + sumTotalCount += topNFunction.getTotalCount(); + } + // limit at the end so global stats are independent of the limit + if (limit != null && limit > 0) { + functions = functions.subList(0, limit); + } + return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); + } + + public boolean isExists(String frameGroupID) { + return this.topNFunctions.containsKey(frameGroupID); + } + + public TopNFunction getTopNFunction(String frameGroupID) { + return this.topNFunctions.get(frameGroupID); + } + + public void addTopNFunction(TopNFunction topNFunction) { + this.topNFunctions.put(topNFunction.getId(), topNFunction); + } + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index a6fd6f39d88a2..cfaa90b8adf85 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -254,6 +254,7 @@ public void testValidateWrongSampleSize() { null, null, null, + null, null ); List<String> validationErrors = request.validate().validationErrors(); @@ -274,6 +275,7 @@ public void testValidateSampleSizeIsValidWithCustomIndices() { null, null, null, + null, null ); assertNull("Expecting no validation errors", request.validate()); @@ -292,6 +294,7 @@ public void testValidateStacktraceWithoutIndices() { null, null, null, + null, null ); List<String> validationErrors = request.validate().validationErrors(); @@ -312,6 +315,7 @@ public void testValidateIndicesWithoutStacktraces() { null, null, null, + null, null ); List<String> validationErrors = request.validate().validationErrors(); @@ -333,6 +337,7 @@ public void testConsidersCustomIndicesInRelatedIndices() { null, null, null, + null, null ); String[] indices = request.indices(); @@ -341,7 +346,21 @@ } public void testConsidersDefaultIndicesInRelatedIndices() { - GetStackTracesRequest request = new GetStackTracesRequest(1, 1.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1, + 1.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); String[] indices = request.indices(); assertEquals(15, indices.length); } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java index 75b59985b35b3..0b37dcd154ca5 100644 ---
a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java @@ -42,6 +42,7 @@ public void testNoResamplingNoSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(false); @@ -70,6 +71,7 @@ public void testNoResamplingButAdjustSampleRate() { null, null, null, + null, null ); request.setAdjustSampleCount(true); @@ -98,6 +100,7 @@ public void testResamplingNoSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(false); @@ -129,6 +132,7 @@ public void testResamplingNoSampleRateAdjustmentWithQuery() { null, null, null, + null, null ); @@ -157,6 +161,7 @@ public void testResamplingAndSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(true); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java index b9d5c549c2fbc..3e1bc4eba202d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java @@ -62,6 +62,5 @@ public void testEquality() { frame, (o -> new StackFrame(o.fileName, o.functionName, o.functionOffset, o.lineNumber)) ); - } } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java new file mode 100644 index 0000000000000..afbbe24979466 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class TopNFunctionTests extends ESTestCase { + public void testToXContent() throws IOException { + String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; + int frameType = 1; + boolean inline = false; + int addressOrLine = 23; + String functionName = "PyDict_GetItemWithError"; + String sourceFilename = "/build/python3.9-RNBry6/python3.9-3.9.2/Objects/dictobject.c"; + int sourceLine = 1456; + String exeFilename = "python3.9"; + + String frameGroupID = FrameGroupID.create(fileID, addressOrLine, exeFilename, sourceFilename, functionName); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + .startObject() + .field("id", frameGroupID) + .field("rank", 1) + .startObject("frame") + .field("frame_type", frameType) + .field("inline", inline) + .field("address_or_line", addressOrLine) + .field("function_name", functionName) + .field("file_name", sourceFilename) + .field("line_number", sourceLine) + .field("executable_file_name", exeFilename) + .endObject() + .field("sub_groups", Map.of("basket", 7L)) + .field("self_count", 1) + .field("total_count", 10) + .field("self_annual_co2_tons") + .rawValue("2.2000") + .field("total_annual_co2_tons") + .rawValue("22.0000") + .field("self_annual_costs_usd", "12.0000") + .field("total_annual_costs_usd", "120.0000") + .endObject(); + + XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + TopNFunction topNFunction = new TopNFunction( + frameGroupID, + 1, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 1, + 10, + 2.2d, + 22.0d, + 12.0d, + 120.0d, + Map.of("basket", 7L) + ); + topNFunction.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); + } + + public void testEquality() { + String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; + int frameType = 1; + boolean inline = false; + int addressOrLine = 23; + String functionName = "PyDict_GetItemWithError"; + String sourceFilename = "/build/python3.9-RNBry6/python3.9-3.9.2/Objects/dictobject.c"; + int sourceLine = 1456; + String exeFilename = "python3.9"; + + String frameGroupID = FrameGroupID.create(fileID, addressOrLine, exeFilename, sourceFilename, functionName); + + TopNFunction topNFunction = new TopNFunction( + frameGroupID, + 1, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 1, + 10, + 2.0d, + 4.0d, + 23.2d, + 12.0d, + Map.of("checkout", 4L, "basket", 12L) + ); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(topNFunction, (TopNFunction::clone)); + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java index fd20ed04978f2..e10892f0e73ce 
100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java @@ -116,7 +116,6 @@ public void testCreateFlamegraph() { assertEquals(1L, response.getSelfCPU()); assertEquals(10L, response.getTotalCPU()); assertEquals(1L, response.getTotalSamples()); - } public void testCreateEmptyFlamegraphWithRootNode() { diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java new file mode 100644 index 0000000000000..f248d8e27bd43 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class TransportGetTopNFunctionsActionTests extends ESTestCase { + public void testCreateAllTopNFunctions() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse( + Map.of( + "2buqP1GpF-TXYmL4USW8gA", + new StackTrace( + new int[] { 12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645 }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w" }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-wAAAAAAwxLg", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwOl", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwvh", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHs1T", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHCj0", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHBtN", + "fr28zxcZ2UDasxYuu6dV-wAAAAAAxjUZ", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA0Gra", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" }, + new int[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }, + 0.3d, + 2.7d, + 1 + ) + ), + Map.of(), + Map.of("fr28zxcZ2UDasxYuu6dV-w", "containerd"), + Map.of("2buqP1GpF-TXYmL4USW8gA", new TraceEvent("2buqP1GpF-TXYmL4USW8gA", 1L)), + 9, + 1.0d, + 1 + ); + + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, null); + assertNotNull(response); + assertEquals(1, response.getSelfCount()); + assertEquals(9, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(9, topNFunctions.size()); + + assertEquals( + List.of( + topN("178196121", 1, 16339645, 1L, 1L, 0.3d, 0.3d, 2.7d, 2.7d), + topN("181192637", 2, 19336161, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("181190529", 3, 19334053, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180652335", 4, 18795859, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180479184", 5, 18622708, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180475689", 6, 18619213, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("175515318", 7, 13658842, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("174846197", 8, 12989721, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("174640828", 9, 12784352, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d) + 
), + topNFunctions + ); + } + + public void testCreateTopNFunctionsWithLimit() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse( + Map.of( + "2buqP1GpF-TXYmL4USW8gA", + new StackTrace( + new int[] { 12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645 }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w" }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-wAAAAAAwxLg", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwOl", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwvh", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHs1T", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHCj0", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHBtN", + "fr28zxcZ2UDasxYuu6dV-wAAAAAAxjUZ", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA0Gra", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" }, + new int[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }, + 0.3d, + 2.7d, + 1 + ) + ), + Map.of(), + Map.of("fr28zxcZ2UDasxYuu6dV-w", "containerd"), + Map.of("2buqP1GpF-TXYmL4USW8gA", new TraceEvent("2buqP1GpF-TXYmL4USW8gA", 1L)), + 9, + 1.0d, + 1 + ); + + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, 3); + assertNotNull(response); + assertEquals(1, response.getSelfCount()); + assertEquals(9, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(3, topNFunctions.size()); + + assertEquals( + List.of( + topN("178196121", 1, 16339645, 1L, 1L, 0.3d, 0.3d, 2.7d, 2.7d), + topN("181192637", 2, 19336161, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("181190529", 3, 19334053, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d) + ), + topNFunctions + ); + } + + private TopNFunction topN( + String id, + int rank, + int addressOrLine, + long exclusiveCount, + long inclusiveCount, + double annualCO2TonsExclusive, + double annualCO2TonsInclusive, + double annualCostsUSDExclusive, + double annualCostsUSDInclusive + ) { + return new TopNFunction( + id, + rank, + 3, + false, + addressOrLine, + "", + "", + 0, + "containerd", + exclusiveCount, + inclusiveCount, + annualCO2TonsExclusive, + annualCO2TonsInclusive, + annualCostsUSDExclusive, + annualCostsUSDInclusive, + Collections.emptyMap() + ); + } + + public void testCreateEmptyTopNFunctions() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse(Map.of(), Map.of(), Map.of(), Map.of(), 0, 1.0d, 0); + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, null); + assertNotNull(response); + assertEquals(0, response.getSelfCount()); + assertEquals(0, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(0, topNFunctions.size()); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1009499d91b41..0f292d64bc4a6 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -549,6 +549,7 @@ public class Constants { "indices:data/read/open_point_in_time", 
"indices:data/read/profiling/stack_traces", "indices:data/read/profiling/flamegraph", + "indices:data/read/profiling/topn/functions", "indices:data/read/rank_eval", "indices:data/read/scroll", "indices:data/read/scroll/clear", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index cc282d26ae418..ffd5358a12d0a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -217,3 +217,67 @@ teardown: } } - match: { Size: 47} + +--- +"Test topN functions from profiling-events": + - skip: + version: "- 8.13.99" + reason: "the topN functions API was added in 8.14.0" + + - do: + profiling.topn_functions: + body: > + { + "sample_size": 20000, + "requested_duration": 86400, + "limit": 10, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2023-11-20", + "lt": "2023-11-21", + "format": "yyyy-MM-dd" + } + } + } + ] + } + } + } + - length: { topn: 10} + +--- +"Test topN functions from test-events": + - skip: + version: "- 8.13.99" + reason: "the topN functions API was added in 8.14.0" + + - do: + profiling.topn_functions: + body: > + { + "sample_size": 20000, + "indices": ["test-event*"], + "stacktrace_ids_field": "events", + "requested_duration": 86400, + "limit": 10, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2023-11-20", + "lt": "2023-11-21", + "format": "yyyy-MM-dd" + } + } + } + ] + } + } + } + - length: { topn: 10} From 08ad143dcd04d72b6fbe2977cabfe5be562b0cd9 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Thu, 4 Apr 2024 09:46:33 -0500 Subject: [PATCH 122/264] Support swapping the user context to the secondary auth user for named actions (#106613) This commit adds the ability to define specific actions that are required to be executed as the secondary authenticated user. When actions are defined as secondary auth actions, then the secondary authentication headers (and subsequent authentication) are required to call those actions and the authorization is based on the secondary user. The SPI hook to define which actions are required is intended for internal only use only. 
--- .../ml/MlUpgradeModeActionFilterTests.java | 4 +- .../qa/secondary-auth-actions/build.gradle | 29 +++++ .../auth/actions/SecondaryAuthActionsIT.java | 118 ++++++++++++++++++ .../src/main/java/module-info.java | 10 ++ .../actions/SecondaryAuthActionsPlugin.java | 19 +++ ...ecurity.authc.support.SecondaryAuthActions | 1 + .../security/src/main/java/module-info.java | 1 + .../xpack/security/Security.java | 6 +- .../action/filter/SecurityActionFilter.java | 19 ++- .../authc/support/SecondaryAuthActions.java | 22 ++++ .../filter/SecurityActionFilterTests.java | 80 +++++++++++- 11 files changed, 305 insertions(+), 4 deletions(-) create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/build.gradle create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java index 7ecf98cd7a6dd..3092808dc91f8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java @@ -31,6 +31,8 @@ import org.junit.After; import org.junit.Before; +import java.util.Set; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -114,7 +116,7 @@ public void testApply_ActionDisallowedInUpgradeModeWithResetModeExemption() { public void testOrder_UpgradeFilterIsExecutedAfterSecurityFilter() { MlUpgradeModeActionFilter upgradeModeFilter = new MlUpgradeModeActionFilter(clusterService); - SecurityActionFilter securityFilter = new SecurityActionFilter(null, null, null, null, mock(ThreadPool.class), null, null); + SecurityActionFilter securityFilter = new SecurityActionFilter(null, null, null, null, mock(ThreadPool.class), null, null, Set::of); ActionFilter[] actionFiltersInOrderOfExecution = new ActionFilters(Sets.newHashSet(upgradeModeFilter, securityFilter)).filters(); assertThat(actionFiltersInOrderOfExecution, is(arrayContaining(securityFilter, upgradeModeFilter))); diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle new file mode 100644 index 0000000000000..f805dc74c4ca0 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle @@ -0,0 +1,29 @@ +import org.elasticsearch.gradle.util.GradleUtils + +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.base-internal-es-plugin' + +esplugin { + name 'secondary-auth-actions-extension' + description 'Spi extension plugin for security to enforce custom secondary auth actions' + classname 'org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin' + 
extendedPlugins = ['x-pack-security'] +} + +dependencies { + compileOnly project(':x-pack:plugin:core') + compileOnly project(':x-pack:plugin:security') + javaRestTestImplementation project(':test:framework') +} + +GradleUtils.extendSourceSet(project, 'main', 'javaRestTest') + +dependencies { + clusterPlugins project(':x-pack:plugin:security:qa:secondary-auth-actions') +} + +tasks.named("javadoc").configure { enabled = false } + +tasks.named('javaRestTest') { + usesDefaultDistribution() +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java b/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java new file mode 100644 index 0000000000000..768353dcb14f4 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.secondary.auth.actions; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class SecondaryAuthActionsIT extends ESRestTestCase { + + private static final String ADMIN_TOKEN = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); + private static final String USER_TOKEN = basicAuthHeaderValue("test_user", new SecureString("x-pack-test-password".toCharArray())); + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + // ensure secondary auth actions go across the cluster, so we don't attempt to double swap out the user in context + .node(0, n -> n.setting("node.roles", "[master]")) + .node(1, n -> n.setting("node.roles", "[data]")) + .setting("xpack.watcher.enabled", "false") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.enabled", "true") + .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.http.ssl.enabled", "false") + .user("test_admin", "x-pack-test-password", "superuser", false) + .user("test_user", "x-pack-test-password", "logsrole", false) + .plugin("secondary-auth-actions-extension") + + .build(); + + @Before + public void setup() throws IOException { + final Request roleRequest = new Request("PUT", "/_security/role/logsrole"); + 
roleRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + roleRequest.setJsonEntity("{\"cluster\":[],\"indices\":[{\"names\":[\"logs*\"],\"privileges\":[\"view_index_metadata\"]}]}"); + client().performRequest(roleRequest); + + final Request logsRequest = new Request("PUT", "/logs/_doc/1"); + logsRequest.setEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)); + logsRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + client().performRequest(logsRequest); + + final Request metricsRequest = new Request("PUT", "/metrics/_doc/1"); + metricsRequest.setEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)); + metricsRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + client().performRequest(metricsRequest); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restAdminSettings() { + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", ADMIN_TOKEN).build(); + } + + public void testSecondaryAuthUser() throws IOException { + final Request authenticateRequest = new Request("GET", "_security/_authenticate"); + authenticateRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + // This should fail because the secondary auth header is not set + ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(authenticateRequest)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); + assertThat(responseException.getMessage(), containsString("es-secondary-authorization header must be used to call action")); + // set the secondary auth header + authenticateRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", USER_TOKEN) + ); + final Response authenticateResponse = client().performRequest(authenticateRequest); + final Map authenticateResponseBody = entityAsMap(authenticateResponse); + // ensure the result represents the secondary user + assertEquals("test_user", authenticateResponseBody.get("username")); + assertEquals(List.of("logsrole"), authenticateResponseBody.get("roles")); + + // check index level permissions + final Request getIndicesRequest = new Request("GET", "*"); + getIndicesRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", USER_TOKEN) + ); + final Response getIndicesResponse = client().performRequest(getIndicesRequest); + final Map getIndicesResponseBody = entityAsMap(getIndicesResponse); + assertNotNull(getIndicesResponseBody.get("logs")); + assertNull(getIndicesResponseBody.get("metrics")); + + // invalid secondary auth header + getIndicesRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", "junk") + ); + responseException = expectThrows(ResponseException.class, () -> client().performRequest(getIndicesRequest)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.UNAUTHORIZED.getStatus())); + assertThat(responseException.getMessage(), containsString("Failed to authenticate secondary user")); + } +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java 
b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java new file mode 100644 index 0000000000000..15ffd5ce480f0 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java @@ -0,0 +1,10 @@ +import org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; + +module org.elasticsearch.internal.security { + requires org.elasticsearch.base; + requires org.elasticsearch.server; + requires org.elasticsearch.security; + + provides SecondaryAuthActions with SecondaryAuthActionsPlugin; +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java new file mode 100644 index 0000000000000..5bb5f7b90f407 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.secondary.auth.actions; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; + +import java.util.Set; + +public class SecondaryAuthActionsPlugin extends Plugin implements SecondaryAuthActions { + public Set get() { + return Set.of("cluster:admin/xpack/security/user/authenticate", "indices:admin/get"); + } +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions new file mode 100644 index 0000000000000..7cc1a88205a76 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions @@ -0,0 +1 @@ +org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java index cd1eb8a650149..557d601579af8 100644 --- a/x-pack/plugin/security/src/main/java/module-info.java +++ b/x-pack/plugin/security/src/main/java/module-info.java @@ -68,6 +68,7 @@ exports org.elasticsearch.xpack.security.authz to org.elasticsearch.internal.security; exports org.elasticsearch.xpack.security.authc to org.elasticsearch.xcontent; exports org.elasticsearch.xpack.security.slowlog to org.elasticsearch.server; + exports org.elasticsearch.xpack.security.authc.support to org.elasticsearch.internal.security; provides org.elasticsearch.index.SlowLogFieldProvider with org.elasticsearch.xpack.security.slowlog.SecuritySlowLogFieldProvider; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 4fe4b35683343..f4457dcbbfaa9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -290,6 +290,7 @@ import org.elasticsearch.xpack.security.authc.service.FileServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.IndexServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; @@ -587,6 +588,7 @@ public class Security extends Plugin private final SetOnce> reloadableComponents = new SetOnce<>(); private final SetOnce authorizationDenialMessages = new SetOnce<>(); private final SetOnce reservedRoleNameCheckerFactory = new SetOnce<>(); + private final SetOnce secondaryAuthActions = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -1080,7 +1082,8 @@ Collection createComponents( getLicenseState(), threadPool, securityContext.get(), - destructiveOperations + destructiveOperations, + secondaryAuthActions.get() == null ? Set::of : secondaryAuthActions.get() ) ); @@ -2115,6 +2118,7 @@ public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); loadSingletonExtensionAndSetOnce(loader, reservedRoleNameCheckerFactory, ReservedRoleNameChecker.Factory.class); + loadSingletonExtensionAndSetOnce(loader, secondaryAuthActions, SecondaryAuthActions.class); } private void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce setOnce, Class clazz) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index 08544d316e87a..d499e55b21b70 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -28,12 +28,14 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; import org.elasticsearch.xpack.core.security.authz.privilege.HealthAndStatsPrivilege; import org.elasticsearch.xpack.core.security.user.InternalUsers; import org.elasticsearch.xpack.security.action.SecurityActionMapper; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; @@ -51,6 +53,7 @@ public class SecurityActionFilter implements ActionFilter { private final ThreadContext threadContext; private final SecurityContext securityContext; private final DestructiveOperations destructiveOperations; + private final 
SecondaryAuthActions secondaryAuthActions; public SecurityActionFilter( AuthenticationService authcService, @@ -59,7 +62,8 @@ public SecurityActionFilter( XPackLicenseState licenseState, ThreadPool threadPool, SecurityContext securityContext, - DestructiveOperations destructiveOperations + DestructiveOperations destructiveOperations, + SecondaryAuthActions secondaryAuthActions ) { this.authcService = authcService; this.authzService = authzService; @@ -68,6 +72,7 @@ public SecurityActionFilter( this.threadContext = threadPool.getThreadContext(); this.securityContext = securityContext; this.destructiveOperations = destructiveOperations; + this.secondaryAuthActions = secondaryAuthActions; } @Override @@ -109,6 +114,18 @@ operations are blocked on license expiration. All data operations (read and writ TransportVersion.current(), // current version since this is on the same node (original) -> { applyInternal(task, chain, action, request, contextPreservingListener); } ); + } else if (secondaryAuthActions.get().contains(action) && threadContext.getHeader("secondary_auth_action_applied") == null) { + SecondaryAuthentication secondaryAuth = securityContext.getSecondaryAuthentication(); + if (secondaryAuth == null) { + throw new IllegalArgumentException("es-secondary-authorization header must be used to call action [" + action + "]"); + } else { + secondaryAuth.execute(ignore -> { + // this header exists to ensure that if this action goes across nodes we don't attempt to swap out the user again + threadContext.putHeader("secondary_auth_action_applied", "true"); + applyInternal(task, chain, action, request, contextPreservingListener); + return null; + }); + } } else { try (ThreadContext.StoredContext ignore = threadContext.newStoredContextPreservingResponseHeaders()) { applyInternal(task, chain, action, request, contextPreservingListener); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java new file mode 100644 index 0000000000000..0c6f6e7270627 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.authc.support; + +import java.util.Set; + +/** + * Actions that are only available when a secondary authenticator is present. The user represented by the secondary authenticator will + * be used as the user for these actions. Secondary authorization requires both the primary and secondary authentication passes. + * Any actions returned here will ensure that the RBAC authorization represents the secondary user. + * If these actions are called without a secondary authenticated user, an exception will be thrown. 
+ * @see SecondaryAuthenticator + */ +@FunctionalInterface +public interface SecondaryAuthActions { + Set<String> get(); +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index a2ab6c1864783..0191062eb4631 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.user.InternalUsers; @@ -47,8 +48,10 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.util.Collections; +import java.util.Set; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.INDICES_PERMISSIONS_KEY; @@ -62,6 +65,7 @@ import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; @@ -114,7 +118,8 @@ public void init() throws Exception { licenseState, threadPool, securityContext, - destructiveOperations + destructiveOperations, + () -> Set.of("_action_secondary_auth") ); } @@ -306,6 +311,79 @@ public void testActionProcessException() throws Exception { verifyNoMoreInteractions(chain); } + public void testSecondaryAuth() throws Exception { + ActionRequest request = mock(ActionRequest.class); + ActionListener listener = mock(ActionListener.class); + Task task = mock(Task.class); + User user1 = new User("user1", "r1", "r2"); + User user2 = new User("user2", "r3", "r4"); + Authentication authentication = AuthenticationTestHelper.builder() + .user(user1) + .realmRef(new RealmRef("test", "test", "foo")) + .build(false); + Authentication secondaryAuth = AuthenticationTestHelper.builder() + .user(user2) + .realmRef(new RealmRef("test2", "test2", "foo2")) + .build(false); + String requestId = UUIDs.randomBase64UUID(); + + // mock primary and secondary authentication headers already set + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); + threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()); + assertNull(threadContext.getTransient(SecondaryAuthentication.THREAD_CTX_KEY)); + threadContext.putTransient(SecondaryAuthentication.THREAD_CTX_KEY, secondaryAuth); + threadContext.putHeader(SecondaryAuthentication.THREAD_CTX_KEY, secondaryAuth.encode()); + + String actionName = "_action_secondary_auth"; + // 
ensure that the filter swaps out to the secondary user + doAnswer(i -> { + final Object[] args = i.getArguments(); + assertThat(args, arrayWithSize(4)); + ActionListener callback = (ActionListener) args[args.length - 1]; + assertSame(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY), secondaryAuth); + assertEquals(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), secondaryAuth.encode()); + threadContext.putHeader("_xpack_audit_request_id", requestId); + callback.onResponse(secondaryAuth); + return Void.TYPE; + }).when(authcService).authenticate(eq(actionName), eq(request), eq(InternalUsers.SYSTEM_USER), anyActionListener()); + + mockAuthorize(); + ActionResponse actionResponse = mock(ActionResponse.class); + mockChain(task, actionName, request, actionResponse); + filter.apply(task, actionName, request, listener, chain); + verify(authzService).authorize(eq(secondaryAuth), eq(actionName), eq(request), anyActionListener()); + verify(auditTrail).coordinatingActionResponse(eq(requestId), eq(secondaryAuth), eq(actionName), eq(request), eq(actionResponse)); + } + + public void testSecondaryAuthRequired() throws Exception { + ActionRequest request = mock(ActionRequest.class); + ActionListener listener = mock(ActionListener.class); + Task task = mock(Task.class); + User user1 = new User("user1", "r1", "r2"); + Authentication authentication = AuthenticationTestHelper.builder() + .user(user1) + .realmRef(new RealmRef("test", "test", "foo")) + .build(false); + // mock primary but not secondary authentication headers already set + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); + threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()); + String actionName = "_action_secondary_auth"; + ActionResponse actionResponse = mock(ActionResponse.class); + mockChain(task, actionName, request, actionResponse); + filter.apply(task, actionName, request, listener, chain); + ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertTrue(exceptionCaptor.getValue() instanceof IllegalArgumentException); + assertEquals( + "es-secondary-authorization header must be used to call action [" + actionName + "]", + exceptionCaptor.getValue().getMessage() + ); + verifyNoInteractions(authcService); + verifyNoInteractions(authzService); + } + private void mockAuthentication(ActionRequest request, Authentication authentication, String requestId) { doAnswer(i -> { final Object[] args = i.getArguments(); From 01efbbf137248011f23d9448c66e2585eb27f34c Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 4 Apr 2024 16:51:02 +0200 Subject: [PATCH 123/264] Remove AwaitsFix of fixed test (#107104) This `@AwaitsFix` annotation linked to https://github.com/elastic/elasticsearch/issues/102813, which was marked as a duplicate of https://github.com/elastic/elasticsearch/issues/102337, which in turn was fixed by https://github.com/elastic/elasticsearch/pull/102724.
--- .../datastreams/action/GetDataStreamsResponseTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index e200ff7cba2e1..2118c98b377bc 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -63,7 +63,6 @@ protected Response mutateInstance(Response instance) { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102813") public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { // we'll test a data stream with 3 backing indices and a failure store - two backing indices managed by ILM (having the ILM policy // configured for them) and the remainder without any ILM policy configured From 6c986cbd8a6f8b737ba02c27c92bd5eba861de57 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 08:20:08 -0700 Subject: [PATCH 124/264] Add primary and coordination operation rejection ratio metrics (#106978) Introduce two new metrics, `es.indexing.primary_operations.rejections.ratio` and `es.indexing.coordinating_operations.rejections.ratio`. They are needed as a second signal (along with the rejection rate) to determine if it is time to alert about 429 errors during _bulk indexing. Also adds an IT test to ensure that indexing metrics are published. --- .../metrics/NodeIndexingMetricsIT.java | 264 ++++++++++++++++++ .../monitor/metrics/NodeMetrics.java | 44 +++ 2 files changed, 308 insertions(+) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java new file mode 100644 index 0000000000000..6cca0ccb3fdf3 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -0,0 +1,264 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.monitor.metrics; + +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.index.IndexingPressure.MAX_INDEXING_BYTES; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) +public class NodeIndexingMetricsIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(TestTelemetryPlugin.class); + } + + public void testNodeIndexingMetricsArePublishing() throws Exception { + + final String dataNode = internalCluster().startNode(); + ensureStableCluster(1); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + assertAcked(prepareCreate("test").get()); + + // index some documents + final int docsCount = randomIntBetween(500, 1000); + for (int i = 0; i < docsCount; i++) { + var indexResponse = client(dataNode).index(new IndexRequest("test").id("doc_" + i).source(Map.of("key", i, "val", i))) + .actionGet(); + // check that all documents were created successfully since metric counters below assume that + assertThat(indexResponse.status(), equalTo(RestStatus.CREATED)); + } + + // delete documents + final int deletesCount = randomIntBetween(1, 50); + for (int i = 0; i < deletesCount; i++) { + client(dataNode).delete(new DeleteRequest().index("test").id("doc_" + i)).actionGet(); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + assertBusy(() -> { + var indexingTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); + assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); + + var indexingCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); + assertThat(indexingCurrent.getLong(), equalTo(0L)); + + var indexingFailedTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); + assertThat(indexingFailedTotal.getLong(), equalTo(0L)); + + var deletionTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); + assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); + + var deletionCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); + assertThat(deletionCurrent.getLong(), equalTo(0L)); 
+ + var indexingTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); + assertThat(indexingTime.getLong(), greaterThan(0L)); + + var deletionTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); + assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); + + var throttleTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.throttle.time"); + assertThat(throttleTime.getLong(), equalTo(0L)); + + var noopTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); + assertThat(noopTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsSize = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.size" + ); + assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); + + var coordinatingOperationsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + + var coordinatingOperationsCurrentSize = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.size" + ); + assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); + + var coordinatingOperationsCurrentTotal = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.total" + ); + assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.coordinating_operations.rejections.ratio" + ); + assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + + var primaryOperationsSize = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); + assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); + + var primaryOperationsTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total"); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + + var primaryOperationsCurrentSize = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.size" + ); + assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); + + var primaryOperationsCurrentTotal = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.total" + ); + assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); + + var primaryOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.rejections.total" + ); + assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); + + var primaryOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.primary_operations.rejections.ratio" + ); + assertThat(primaryOperationsRejectionsRatio.getDouble(), 
equalTo(0.0)); + + }); + + } + + public void testCoordinatingRejectionMetricsArePublishing() throws Exception { + + // lower Indexing Pressure limits to trigger coordinating rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB")); + ensureStableCluster(1); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + assertAcked(prepareCreate("test").get()); + + final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode)); + final int batchCount = randomIntBetween(100, 1000); + for (int i = 0; i < batchCount; i++) { + bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(100))); + } + + // big batch should not pass thru coordinating limit check + expectThrows(EsRejectedExecutionException.class, bulkRequestBuilder); + + // simulate async apm `polling` call for metrics + plugin.collect(); + + // this bulk request is too big to pass coordinating limit check + assertBusy(() -> { + var coordinatingOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + }); + } + + public void testPrimaryRejectionMetricsArePublishing() throws Exception { + + // setting low Indexing Pressure limits to trigger primary rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB").build()); + // setting high Indexing Pressure limits to pass coordinating checks + final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( + Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "10MB").build() + ); + ensureStableCluster(2); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + final int numberOfShards = randomIntBetween(1, 5); + assertAcked(prepareCreate("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + + final BulkRequest bulkRequest = new BulkRequest(); + final int batchCount = randomIntBetween(50, 100); + for (int i = 0; i < batchCount; i++) { + bulkRequest.add(new IndexRequest("test").source("field", randomAlphaOfLength(2048))); + } + + // big batch should pass thru coordinating limit check but fail on primary + // note the bulk request is sent to coordinating client + final BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.hasFailures(), equalTo(true)); + assertThat(Arrays.stream(bulkResponse.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), equalTo(true)); + + // simulate async apm `polling` call for metrics + plugin.collect(); + + // this bulk request is too big to pass coordinating limit check + assertBusy(() -> { + var primaryOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.rejections.total" + ); + assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); + }); + + } + + private static Measurement getRecordedMetric(Function> metricGetter, String name) { + final List measurements = metricGetter.apply(name); + 
assertFalse("Indexing metric is not recorded", measurements.isEmpty()); + assertThat(measurements.size(), equalTo(1)); + return measurements.get(0); + } +} diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 8874c43c919ca..527acb8d4fcbc 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -15,9 +15,11 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.stats.IndexingPressureStats; import org.elasticsearch.monitor.jvm.GcNames; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.node.NodeService; +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -526,6 +528,27 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerDoubleGauge( + "es.indexing.coordinating_operations.rejections.ratio", + "Ratio of rejected coordinating operations", + "ratio", + () -> { + var totalCoordinatingOperations = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getTotalCoordinatingOps) + .orElse(0L); + var totalCoordinatingRejections = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getCoordinatingRejections) + .orElse(0L); + // rejections do not count towards `totalCoordinatingOperations` + var totalOps = totalCoordinatingOperations + totalCoordinatingRejections; + return new DoubleWithAttributes(totalOps != 0 ? (double) totalCoordinatingRejections / totalOps : 0.0); + } + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.indexing.primary_operations.size", @@ -596,6 +619,27 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerDoubleGauge( + "es.indexing.primary_operations.rejections.ratio", + "Ratio of rejected primary operations", + "ratio", + () -> { + var totalPrimaryOperations = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getTotalPrimaryOps) + .orElse(0L); + var totalPrimaryRejections = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getPrimaryRejections) + .orElse(0L); + // rejections do not count towards `totalPrimaryOperations` + var totalOps = totalPrimaryOperations + totalPrimaryRejections; + return new DoubleWithAttributes(totalOps != 0 ? 
(double) totalPrimaryRejections / totalOps : 0.0); + } + ) + ); + metrics.add( registry.registerLongGauge( "es.indexing.memory.limit.size", From 75f548765f3583f41aa3baeffbafdf861aa5a11e Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Thu, 4 Apr 2024 11:34:35 -0400 Subject: [PATCH 125/264] fix substring type resolution (#107109) --- .../function/scalar/string/Substring.java | 10 +++++-- .../AbstractScalarFunctionTestCase.java | 2 +- .../scalar/string/SubstringTests.java | 29 +++++++++++++++++-- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 88187b8ba65bc..3bd7d660352c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -28,8 +29,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; public class Substring extends EsqlScalarFunction implements OptionalArgument { @@ -67,12 +68,15 @@ protected TypeResolution resolveType() { return resolution; } - resolution = isInteger(start, sourceText(), SECOND); + resolution = TypeResolutions.isType(start, dt -> dt == INTEGER, sourceText(), SECOND, "integer"); + if (resolution.unresolved()) { return resolution; } - return length == null ? TypeResolution.TYPE_RESOLVED : isInteger(length, sourceText(), THIRD); + return length == null + ? 
TypeResolution.TYPE_RESOLVED + : TypeResolutions.isType(length, dt -> dt == INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java index 2b75010ef66a1..a0f63a46649e2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java @@ -165,7 +165,7 @@ private String expectedTypeName(Set validTypes) { if (withoutNull.equals(Arrays.asList(strings()))) { return "string"; } - if (withoutNull.equals(Arrays.asList(integers()))) { + if (withoutNull.equals(Arrays.asList(integers())) || withoutNull.equals(List.of(DataTypes.INTEGER))) { return "integer"; } if (withoutNull.equals(Arrays.asList(rationals()))) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 8dbc9eaeeccd6..4736ba2cc74d7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -64,7 +64,32 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) ); - }))); + }), + new TestCaseSupplier( + "Substring basic test with start long", + List.of(DataTypes.KEYWORD, DataTypes.LONG, DataTypes.INTEGER), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("text"), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(1L, DataTypes.LONG, "start"), + new TestCaseSupplier.TypedData(2, DataTypes.INTEGER, "length") + ), + "second argument of [] must be [integer], found value [start] type [long]" + ) + ), + new TestCaseSupplier( + "Substring basic test with length double", + List.of(DataTypes.KEYWORD, DataTypes.INTEGER, DataTypes.DOUBLE), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("text"), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(1L, DataTypes.INTEGER, "start"), + new TestCaseSupplier.TypedData(2.0, DataTypes.DOUBLE, "length") + ), + "third argument of [] must be [integer], found value [length] type [double]" + ) + ) + )); } @Override @@ -90,7 +115,7 @@ public void testNoLengthToString() { @Override protected List argSpec() { - return List.of(required(strings()), required(integers()), optional(integers())); + return List.of(required(strings()), required(DataTypes.INTEGER), optional(DataTypes.INTEGER)); } @Override From 0cc19f32704cfd168d3d3fa3cb2200c27e3b4c6b Mon Sep 17 00:00:00 2001 From: James Baiera Date: Thu, 4 Apr 2024 12:18:29 -0400 Subject: [PATCH 126/264] Extract failure store specific settings to the failure store (#107063) Moves the failure store specific settings logic to the same place that we manage their mappings. 
--- .../DataStreamFailureStoreDefinition.java | 42 +++++++++++++++++++ .../MetadataCreateDataStreamService.java | 18 ++------ 2 files changed, 46 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java index f1fc107df5f62..43c4eae41c948 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -9,6 +9,9 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; @@ -20,6 +23,7 @@ */ public class DataStreamFailureStoreDefinition { + public static final String FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME = "data_streams.failure_store.refresh_interval"; public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; static { @@ -131,4 +135,42 @@ public class DataStreamFailureStoreDefinition { throw new AssertionError(e); } } + + public static TimeValue getFailureStoreRefreshInterval(Settings settings) { + return settings.getAsTime(FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME, null); + } + + /** + * Like {@link DataStreamFailureStoreDefinition#applyFailureStoreSettings} but optionally applied on an existing {@link Settings} + * @param existingSettings initial settings to update + * @param nodeSettings settings from the cluster service which capture the node's current settings + * @return either the existing settings if no changes are needed, or a new settings instance which includes failure store specific + * settings + */ + public static Settings buildFailureStoreIndexSettings(Settings existingSettings, Settings nodeSettings) { + // Optionally set a custom refresh interval for the failure store index. + TimeValue refreshInterval = getFailureStoreRefreshInterval(nodeSettings); + if (refreshInterval != null) { + return Settings.builder() + .put(existingSettings) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) + .build(); + } + return existingSettings; + } + + /** + * Like {@link DataStreamFailureStoreDefinition#buildFailureStoreIndexSettings} but for usage with a {@link Settings.Builder} + * @param nodeSettings settings from the cluster service which capture the node's current settings + * @param builder to capture failure store specific index settings + * @return the original settings builder, with any failure store specific settings applied + */ + public static Settings.Builder applyFailureStoreSettings(Settings nodeSettings, Settings.Builder builder) { + // Optionally set a custom refresh interval for the failure store index. 
+ TimeValue refreshInterval = getFailureStoreRefreshInterval(nodeSettings); + if (refreshInterval != null) { + builder.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval); + } + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 6c933ba1480df..6d0b424cad8f2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -31,7 +31,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -422,15 +421,10 @@ public static ClusterState createFailureStoreIndex( return currentState; } - var indexSettings = MetadataRolloverService.HIDDEN_INDEX_SETTINGS; - // Optionally set a custom refresh interval for the failure store index. - var refreshInterval = getFailureStoreRefreshInterval(settings); - if (refreshInterval != null) { - indexSettings = Settings.builder() - .put(indexSettings) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) - .build(); - } + var indexSettings = DataStreamFailureStoreDefinition.buildFailureStoreIndexSettings( + MetadataRolloverService.HIDDEN_INDEX_SETTINGS, + settings + ); CreateIndexClusterStateUpdateRequest createIndexRequest = new CreateIndexClusterStateUpdateRequest( cause, @@ -489,8 +483,4 @@ public static void validateTimestampFieldMapping(MappingLookup mappingLookup) th // Sanity check (this validation logic should already have been executed when merging mappings): fieldMapper.validate(mappingLookup); } - - public static TimeValue getFailureStoreRefreshInterval(Settings settings) { - return settings.getAsTime(FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME, null); - } } From 7483844edc23675e22cdd37ddd0beaa715c5d962 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 20:12:41 +0300 Subject: [PATCH 127/264] ESQL: Allow grouping key inside stats expressions (#106579) Similar to aggs, allow grouping keys to be used inside STATS expressions by introducing a synthetic eval, e.g.: STATS a = x + count(*) BY x becomes STATS c = count(*) BY x | EVAL a = x + c | KEEP a, x To better handle overriding aliases, introduce EsqlAggregate which keeps the declared structure intact during analysis and verification while merging the output. The deduplication happens now in the optimization phase.
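One more illustrative rewrite in the same scheme, this time with an aliased grouping key used inside a STATS expression (c1 stands in for the synthetic name the rule actually generates): STATS e = max(a) + 1 BY a = x % 2 becomes STATS c1 = max(a) BY a = x % 2 | EVAL e = c1 + 1 | KEEP e, a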
Fix small bug that caused replacement of expressions inside aggregations to be skipped despite being applied Improved Verifier to not repeat error messages in case for Aggregates Removed verification heuristics for missing columns as functions as it was too broad --- docs/changelog/106579.yaml | 5 + .../src/main/resources/stats.csv-spec | 141 +++++++++++++++- .../xpack/esql/analysis/Analyzer.java | 142 +++++++++------- .../xpack/esql/analysis/Verifier.java | 92 +++++++---- .../esql/optimizer/LogicalPlanOptimizer.java | 94 ++++++++--- .../xpack/esql/parser/LogicalPlanBuilder.java | 31 +++- .../esql/plan/logical/EsqlAggregate.java | 60 +++++++ .../xpack/esql/analysis/AnalyzerTests.java | 63 ++++++- .../xpack/esql/analysis/VerifierTests.java | 82 ++++++--- .../optimizer/LogicalPlanOptimizerTests.java | 155 ++++++++++++++++++ .../esql/parser/StatementParserTests.java | 16 +- 11 files changed, 728 insertions(+), 153 deletions(-) create mode 100644 docs/changelog/106579.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java diff --git a/docs/changelog/106579.yaml b/docs/changelog/106579.yaml new file mode 100644 index 0000000000000..104ed3066a6f6 --- /dev/null +++ b/docs/changelog/106579.yaml @@ -0,0 +1,5 @@ +pr: 106579 +summary: "ESQL: Allow grouping key inside stats expressions" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 0a18568cf3c84..70d5053c64c45 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1044,7 +1044,7 @@ FROM employees ; // tag::docsStatsByExpression-result[] -my_count:long |LEFT(last_name, 1):keyword +my_count:long |LEFT(last_name, 1):keyword 2 |A 11 |B 5 |C @@ -1188,6 +1188,145 @@ e:i | l:i 4 | 3 ; +nestedAggsOverGroupingExpressionWithoutAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no) + 1 by languages + emp_no +| SORT e +| LIMIT 3 +; + +e:i | languages + emp_no:i +10004 | 10003 +10007 | 10006 +10008 | 10007 +; + +nestedAggsOverGroupingExpressionMultiGroupWithoutAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no + 10) + 1 by languages + emp_no, f = emp_no % 3 +| SORT e, f +| LIMIT 3 +; + +e:i | languages + emp_no:i | f:i +10014 | 10003 | 2 +10017 | 10006 | 0 +10018 | 10007 | 0 +; + +nestedAggsOverGroupingExpressionWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no + 10) + 1 by languages + emp_no +| SORT e +| LIMIT 3 +; + +e:i | languages + emp_no:i +10014 | 10003 +10017 | 10006 +10018 | 10007 +; + +nestedAggsOverGroupingExpressionWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(a), f = min(a), g = count(a) + 1 by a = languages + emp_no +| SORT a +| LIMIT 3 +; + +e: i | f:i | g:l | a:i +10003 | 10003 | 2 | 10003 +10006 | 10006 | 2 | 10006 +10007 | 10007 | 3 | 10007 +; + +nestedAggsOverGroupingTwiceWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS vals = COUNT() BY x = emp_no, x = languages +| SORT x +| LIMIT 3 +; + +vals: l| x:i +15 | 1 +19 | 2 +17 | 3 +; + +nestedAggsOverGroupingWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + 1, count(*) by f = first_name +| SORT f +| LIMIT 3 +; + +e:i | count(*):l | 
f:s +10 | 1 | Alejandro +8 | 1 | Amabile +7 | 1 | Anneke +; + +nestedAggsOverGroupingWithAliasInsideExpression#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS m = max(l), o = min(s) by l = languages, s = salary + 1 +| SORT l, s +| LIMIT 5 +; + +m:i | o:i | l:i | s:i +1 | 25977 | 1 | 25977 +1 | 28036 | 1 | 28036 +1 | 34342 | 1 | 34342 +1 | 39111 | 1 | 39111 +1 | 39729 | 1 | 39729 +; + +nestedAggsOverGroupingWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + 1, c = count(*) by f = first_name +| KEEP e +| SORT e +| LIMIT 5 +; + +e:i +4 +4 +4 +4 +5 +; + +nestedAggsOverGroupingAndAggWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + count(*), m = max(emp_no) by f = first_name +| KEEP e +| SORT e +| LIMIT 5 +; + +e:l +4 +4 +4 +4 +5 +; + +nestedAggsOverGroupingExpAndAggWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = f + count(*), m = max(emp_no) by f = length(first_name) % 2 +| KEEP e +| SORT e +| LIMIT 3 +; + +e:l +44 +47 +null +; + defaultNameWithSpace ROW a = 1 | STATS couNt(*) | SORT `couNt(*)` ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 5c787415a8419..005dd8081a9e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -8,14 +8,15 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Keep; @@ -27,9 +28,11 @@ import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.BaseAnalyzerRule; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.ParameterizedAnalyzerRule; +import org.elasticsearch.xpack.ql.capabilities.Resolvables; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -40,6 +43,8 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; +import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition; +import 
org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -53,6 +58,7 @@ import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.ql.rule.Rule; import org.elasticsearch.xpack.ql.rule.RuleExecutor; +import org.elasticsearch.xpack.ql.session.Configuration; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -60,6 +66,7 @@ import org.elasticsearch.xpack.ql.type.InvalidMappedField; import org.elasticsearch.xpack.ql.type.UnsupportedEsField; import org.elasticsearch.xpack.ql.util.CollectionUtils; +import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.ArrayList; @@ -70,7 +77,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -85,7 +91,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.FLOAT; @@ -105,14 +110,7 @@ public class Analyzer extends ParameterizedRuleExecutor> rules; static { - var resolution = new Batch<>( - "Resolution", - new ResolveTable(), - new ResolveEnrich(), - new ResolveRefs(), - new ResolveFunctions(), - new RemoveDuplicateProjections() - ); + var resolution = new Batch<>("Resolution", new ResolveTable(), new ResolveEnrich(), new ResolveFunctions(), new ResolveRefs()); var finish = new Batch<>("Finish Analysis", Limiter.ONCE, new AddImplicitLimit(), new PromoteStringsInDateComparisons()); rules = List.of(resolution, finish); } @@ -313,6 +311,10 @@ protected LogicalPlan doRule(LogicalPlan plan) { childrenOutput.addAll(output); } + if (plan instanceof Aggregate agg) { + return resolveAggregate(agg, childrenOutput); + } + if (plan instanceof Drop d) { return resolveDrop(d, childrenOutput); } @@ -337,7 +339,60 @@ protected LogicalPlan doRule(LogicalPlan plan) { return resolveMvExpand(p, childrenOutput); } - return plan.transformExpressionsUp(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + return plan.transformExpressionsOnly(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + } + + private LogicalPlan resolveAggregate(Aggregate a, List childrenOutput) { + // if the grouping is resolved but the aggs are not, use the former to resolve the latter + // e.g. STATS a ... 
GROUP BY a = x + 1 + Holder changed = new Holder<>(false); + List groupings = a.groupings(); + // first resolve groupings since the aggs might refer to them + // trying to globally resolve unresolved attributes will lead to some being marked as unresolvable + if (Resolvables.resolved(groupings) == false) { + List newGroupings = new ArrayList<>(groupings.size()); + for (Expression g : groupings) { + Expression resolved = g.transformUp(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + if (resolved != g) { + changed.set(true); + } + newGroupings.add(resolved); + } + groupings = newGroupings; + if (changed.get()) { + a = new EsqlAggregate(a.source(), a.child(), newGroupings, a.aggregates()); + changed.set(false); + } + } + + if (a.expressionsResolved() == false && Resolvables.resolved(groupings)) { + AttributeMap resolved = new AttributeMap<>(); + for (Expression e : groupings) { + Attribute attr = Expressions.attribute(e); + if (attr != null) { + resolved.put(attr, attr); + } + } + List resolvedList = NamedExpressions.mergeOutputAttributes(new ArrayList<>(resolved.keySet()), childrenOutput); + List newAggregates = new ArrayList<>(); + + for (NamedExpression aggregate : a.aggregates()) { + var agg = (NamedExpression) aggregate.transformUp(UnresolvedAttribute.class, ua -> { + Expression ne = ua; + Attribute maybeResolved = maybeResolveAttribute(ua, resolvedList); + if (maybeResolved != null) { + changed.set(true); + ne = maybeResolved; + } + return ne; + }); + newAggregates.add(agg); + } + + a = changed.get() ? new EsqlAggregate(a.source(), a.child(), groupings, newAggregates) : a; + } + + return a; } private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) { @@ -664,59 +719,30 @@ private static class ResolveFunctions extends ParameterizedAnalyzerRule resolveFunction(uf, context.configuration(), context.functionRegistry()) ); } - } - - /** - * Rule that removes duplicate projects - this is done as a separate rule to allow - * full validation of the node before looking at the duplication. - * The duplication needs to be addressed to avoid ambiguity errors from commands further down - * the line. - */ - private static class RemoveDuplicateProjections extends BaseAnalyzerRule { - - @Override - protected boolean skipResolved() { - return false; - } - - @Override - protected LogicalPlan doRule(LogicalPlan plan) { - if (plan.resolved()) { - if (plan instanceof Aggregate agg) { - plan = removeAggDuplicates(agg); - } - } - return plan; - } - private static LogicalPlan removeAggDuplicates(Aggregate agg) { - var groupings = agg.groupings(); - var newGroupings = new LinkedHashSet<>(groupings); - // reuse existing objects - groupings = newGroupings.size() == groupings.size() ? 
groupings : new ArrayList<>(newGroupings); - - var aggregates = agg.aggregates(); - var newAggregates = new ArrayList<>(aggregates); - var nameSet = Sets.newHashSetWithExpectedSize(newAggregates.size()); - // remove duplicates in reverse to preserve the last one appearing - for (int i = newAggregates.size() - 1; i >= 0; i--) { - var aggregate = newAggregates.get(i); - if (nameSet.add(aggregate.name()) == false) { - newAggregates.remove(i); + public static org.elasticsearch.xpack.ql.expression.function.Function resolveFunction( + UnresolvedFunction uf, + Configuration configuration, + FunctionRegistry functionRegistry + ) { + org.elasticsearch.xpack.ql.expression.function.Function f = null; + if (uf.analyzed()) { + f = uf; + } else { + String functionName = functionRegistry.resolveAlias(uf.name()); + if (functionRegistry.functionExists(functionName) == false) { + f = uf.missing(functionName, functionRegistry.listFunctions()); + } else { + FunctionDefinition def = functionRegistry.resolveFunction(functionName); + f = uf.buildResolved(configuration, def); } } - // reuse existing objects - aggregates = newAggregates.size() == aggregates.size() ? aggregates : newAggregates; - // replace aggregate if needed - agg = (groupings == agg.groupings() && newAggregates == agg.aggregates()) - ? agg - : new Aggregate(agg.source(), agg.child(), groupings, aggregates); - return agg; + return f; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 6492743c8548b..de6c3208df2ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -23,10 +23,11 @@ import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; +import org.elasticsearch.xpack.ql.expression.AttributeSet; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; -import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; @@ -45,6 +46,7 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; +import java.util.function.Consumer; import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.analyzer.VerifierChecks.checkFilterConditionType; @@ -87,16 +89,8 @@ else if (p.resolved()) { p.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); return; } - // handle aggregate first to disambiguate between missing fields or incorrect function declaration - if (p instanceof Aggregate aggregate) { - for (NamedExpression agg : aggregate.aggregates()) { - var child = Alias.unwrap(agg); - if (child instanceof UnresolvedAttribute) { - failures.add(fail(child, "invalid stats declaration; [{}] is not an aggregate function", child.sourceText())); - } - } - } - p.forEachExpression(e -> { + + Consumer unresolvedExpressions = e -> { // everything is fine, skip expression if (e.resolved()) { return; @@ 
-118,7 +112,20 @@ else if (p.resolved()) { failures.add(fail(ae, ae.typeResolved().message())); } }); - }); + }; + + // aggregates duplicate grouping inside aggs - to avoid potentially confusing messages, we only check the aggregates + if (p instanceof Aggregate agg) { + // do groupings first + var groupings = agg.groupings(); + groupings.forEach(unresolvedExpressions); + // followed by just the aggregates (to avoid going through the groups again) + var aggs = agg.aggregates(); + int size = aggs.size() - groupings.size(); + aggs.subList(0, size).forEach(unresolvedExpressions); + } else { + p.forEachExpression(unresolvedExpressions); + } }); // in case of failures bail-out as all other checks will be redundant @@ -155,35 +162,47 @@ else if (p.resolved()) { private static void checkAggregate(LogicalPlan p, Set failures, AttributeMap aliases) { if (p instanceof Aggregate agg) { - - List nakedGroups = new ArrayList<>(agg.groupings().size()); + List groupings = agg.groupings(); + AttributeSet groupRefs = new AttributeSet(); // check grouping // The grouping can not be an aggregate function - agg.groupings().forEach(e -> { + groupings.forEach(e -> { e.forEachUp(g -> { if (g instanceof AggregateFunction af) { failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); } }); - nakedGroups.add(Alias.unwrap(e)); + // keep the grouping attributes (common case) + Attribute attr = Expressions.attribute(e); + if (attr != null) { + groupRefs.add(attr); + } }); - // check aggregates - accept only aggregate functions or expressions in which each naked attribute is copied as - // specified in the grouping clause - agg.aggregates().forEach(e -> { + // check aggregates - accept only aggregate functions or expressions over grouping + // don't allow the group by itself to avoid duplicates in the output + // and since the groups are copied, only look at the declared aggregates + List aggs = agg.aggregates(); + aggs.subList(0, aggs.size() - groupings.size()).forEach(e -> { var exp = Alias.unwrap(e); if (exp.foldable()) { failures.add(fail(exp, "expected an aggregate function but found [{}]", exp.sourceText())); } // traverse the tree to find invalid matches - checkInvalidNamedExpressionUsage(exp, nakedGroups, failures, 0); + checkInvalidNamedExpressionUsage(exp, groupings, groupRefs, failures, 0); }); } } // traverse the expression and look either for an agg function or a grouping match // stop either when no children are left, the leaves are literals or a reference attribute is given - private static void checkInvalidNamedExpressionUsage(Expression e, List groups, Set failures, int level) { + private static void checkInvalidNamedExpressionUsage( + Expression e, + List groups, + AttributeSet groupRefs, + Set failures, + int level + ) { // found an aggregate, constant or a group, bail out if (e instanceof AggregateFunction af) { af.field().forEachDown(AggregateFunction.class, f -> { @@ -191,21 +210,38 @@ private static void checkInvalidNamedExpressionUsage(Expression e, List se.semanticEquals(ne))) { + foundInGrouping = true; + failures.add( + fail( + e, + "column [{}] cannot be used as an aggregate once declared in the STATS BY grouping key [{}]", + ne.name(), + g.sourceText() + ) + ); + break; + } + } + if (foundInGrouping == false) { + failures.add(fail(e, "column [{}] must appear in the STATS BY clause or be used in an aggregate function", ne.name())); + } } // other keep on going else { for (Expression child : e.children()) { - checkInvalidNamedExpressionUsage(child, groups, failures, level + 1); 
+ checkInvalidNamedExpressionUsage(child, groups, groupRefs, failures, level + 1); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index fe2a3076380df..d0375e0b50849 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -30,6 +31,7 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import org.elasticsearch.xpack.ql.common.Failures; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -113,10 +115,11 @@ protected static Batch substitutions() { return new Batch<>( "Substitutions", Limiter.ONCE, - // first extract nested aggs top-level - this simplifies the rest of the rules - new ReplaceStatsAggExpressionWithEval(), - // second extract nested aggs inside of them + new RemoveStatsOverride(), + // first extract nested expressions inside aggs new ReplaceStatsNestedExpressionWithEval(), + // then extract nested aggs top-level + new ReplaceStatsAggExpressionWithEval(), // lastly replace surrogate functions new SubstituteSurrogates(), new ReplaceRegexMatch(), @@ -1289,9 +1292,9 @@ protected LogicalPlan rule(Aggregate aggregate) { Attribute attr = expToAttribute.computeIfAbsent(field.canonical(), k -> { Alias newAlias = new Alias(k.source(), syntheticName(k, af, counter[0]++), null, k, null, true); evals.add(newAlias); - aggsChanged.set(true); return newAlias.toAttribute(); }); + aggsChanged.set(true); // replace field with attribute List newChildren = new ArrayList<>(af.children()); newChildren.set(0, attr); @@ -1327,7 +1330,12 @@ static String syntheticName(Expression expression, AggregateFunction af, int cou * stats a = sum(a) + min(b) by x * becomes * stats a1 = sum(a), a2 = min(b) by x | eval a = a1 + a2 | keep a, x - * + * The rule also considers expressions applied over groups: + * stats a = x + 1 by x becomes stats by x | eval a = x + 1 | keep a, x + * And to combine the two: + * stats a = x + count(*) by x + * becomes + * stats a1 = count(*) by x | eval a = x + a1 | keep a1, x * Since the logic is very similar, this rule also handles duplicate aggregate functions to avoid duplicate compute * stats a = min(x), b = min(x), c = count(*), d = count() by g * becomes @@ -1344,7 +1352,7 @@ protected LogicalPlan rule(Aggregate aggregate) { AttributeMap aliases = new AttributeMap<>(); aggregate.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); - // break down each aggregate into AggregateFunction + // break down each aggregate into AggregateFunction and/or grouping key // preserve the projection at the end List aggs = aggregate.aggregates(); @@ -1386,14 +1394,11 @@ protected LogicalPlan rule(Aggregate aggregate) { newProjections.add(as.replaceChild(found.toAttribute())); } } - // nested expression over 
aggregate function - replace them with reference and move the expression into a - // follow-up eval + // nested expression over aggregate function or groups + // replace them with reference and move the expression into a follow-up eval else { - Holder transformed = new Holder<>(false); + changed.set(true); Expression aggExpression = child.transformUp(AggregateFunction.class, af -> { - transformed.set(true); - changed.set(true); - AggregateFunction canonical = (AggregateFunction) af.canonical(); Alias alias = rootAggs.get(canonical); if (alias == null) { @@ -1415,17 +1420,8 @@ protected LogicalPlan rule(Aggregate aggregate) { return alias.toAttribute(); }); - Alias alias = as; - if (transformed.get()) { - // if at least a change occurred, update the alias and add it to the eval - alias = as.replaceChild(aggExpression); - newEvals.add(alias); - } - // aliased grouping - else { - newAggs.add(alias); - } - + Alias alias = as.replaceChild(aggExpression); + newEvals.add(alias); newProjections.add(alias.toAttribute()); } } @@ -1535,6 +1531,58 @@ private LogicalPlan rule(Eval eval) { } } + /** + * Rule that removes Aggregate overrides in groupings, in aggregates, and across the two. + * The overrides appear when the same alias is used multiple times in aggregations and/or groupings: + * STATS x = COUNT(*), x = MIN(a) BY x = b + 1, x = c + 10 + * becomes + * STATS BY x = c + 10 + * That is, the last declaration for a given alias overrides all the other declarations, with + * groups having priority vs aggregates. + * Separately, it replaces expressions used as group keys inside the aggregates with references: + * STATS max(a + b + 1) BY a + b + * becomes + * STATS max($x + 1) BY $x = a + b + */ + private static class RemoveStatsOverride extends AnalyzerRules.AnalyzerRule { + + @Override + protected boolean skipResolved() { + return false; + } + + @Override + protected LogicalPlan rule(Aggregate agg) { + return agg.resolved() ? removeAggDuplicates(agg) : agg; + } + + private static Aggregate removeAggDuplicates(Aggregate agg) { + var groupings = agg.groupings(); + var aggregates = agg.aggregates(); + + groupings = removeDuplicateNames(groupings); + aggregates = removeDuplicateNames(aggregates); + + // replace EsqlAggregate with Aggregate + return new Aggregate(agg.source(), agg.child(), groupings, aggregates); + } + + private static List removeDuplicateNames(List list) { + var newList = new ArrayList<>(list); + var nameSet = Sets.newHashSetWithExpectedSize(list.size()); + + // remove duplicates + for (int i = list.size() - 1; i >= 0; i--) { + var element = list.get(i); + var name = Expressions.name(element); + if (nameSet.add(name) == false) { + newList.remove(i); + } + } + return newList.size() == list.size() ?
list : newList; + } + } + private abstract static class ParameterizedOptimizerRule extends ParameterizedRule< SubPlan, LogicalPlan, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index b942ccbfb8872..8906014adeecd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -13,12 +13,14 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.MetadataOptionContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.QualifiedNamePatternContext; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; @@ -30,6 +32,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; @@ -42,10 +45,10 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; +import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.parser.ParserUtils; import org.elasticsearch.xpack.ql.plan.TableIdentifier; -import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -55,6 +58,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; @@ -237,16 +241,31 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { - var groupNames = new LinkedHashSet<>(Expressions.names(Expressions.references(groupings))); + var groupNames = new LinkedHashSet<>(Expressions.names(groupings)); + var groupRefNames = new LinkedHashSet<>(Expressions.names(Expressions.references(groupings))); for (NamedExpression aggregate : aggregates) { - if (Alias.unwrap(aggregate) instanceof UnresolvedAttribute ua && groupNames.contains(ua.name())) { - throw new ParsingException(ua.source(), "Cannot specify grouping expression [{}] as an aggregate", ua.name()); + 
Expression e = Alias.unwrap(aggregate); + if (e.resolved() == false && e instanceof UnresolvedFunction == false) { + String name = e.sourceText(); + if (groupNames.contains(name)) { + fail(e, "grouping key [{}] already specified in the STATS BY clause", name); + } else if (groupRefNames.contains(name)) { + fail(e, "Cannot specify grouping expression [{}] as an aggregate", name); + } } } } - aggregates.addAll(groupings); - return input -> new Aggregate(source(ctx), input, new ArrayList<>(groupings), aggregates); + // since groupings are aliased, add refs to them in the aggregates + for (Expression group : groupings) { + aggregates.add(Expressions.attribute(group)); + } + + return input -> new EsqlAggregate(source(ctx), input, new ArrayList<>(groupings), aggregates); + } + + private void fail(Expression exp, String message, Object... args) { + throw new VerificationException(Collections.singletonList(Failure.fail(exp, message, args))); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java new file mode 100644 index 0000000000000..847ed3c9972a8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical; + +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.NamedExpression; +import org.elasticsearch.xpack.ql.plan.logical.Aggregate; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; + +/** + * Extension of Aggregate for handling duplicates. + * In ESQL it is possible to declare multiple aggregations and groupings with the same name, with the last declaration in the grouping + * winning. + * As some of these declarations can be invalid, the data needs to be kept around for validation, yet allowing the duplicates through + * would lead to ambiguity in the output. + * Hence this class - it keeps the declarations intact so that the Verifier can pick them up, while still providing + * a proper output. + * To simplify things, the Aggregate class will be replaced with a vanilla one.
+ */ +public class EsqlAggregate extends Aggregate { + + private List lazyOutput; + + public EsqlAggregate(Source source, LogicalPlan child, List groupings, List aggregates) { + super(source, child, groupings, aggregates); + } + + @Override + public List output() { + if (lazyOutput == null) { + lazyOutput = mergeOutputAttributes(Expressions.asAttributes(aggregates()), emptyList()); + } + + return lazyOutput; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, EsqlAggregate::new, child(), groupings(), aggregates()); + } + + @Override + public EsqlAggregate replaceChild(LogicalPlan newChild) { + return new EsqlAggregate(source(), newChild, groupings(), aggregates()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 543e7c93526d2..aedc789620480 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -21,7 +21,9 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; @@ -1177,9 +1179,14 @@ public void testAggsWithDuplicates() throws Exception { var order = as(limit.child(), OrderBy.class); var agg = as(order.child(), Aggregate.class); var aggregates = agg.aggregates(); - assertThat(aggregates, hasSize(2)); - assertThat(Expressions.names(aggregates), contains("x", "b")); + var output = agg.output(); + assertThat(output, hasSize(2)); + assertThat(Expressions.names(output), contains("x", "b")); var alias = as(aggregates.get(0), Alias.class); + var count = as(alias.child(), Count.class); + alias = as(aggregates.get(1), Alias.class); + var min = as(alias.child(), Min.class); + alias = as(aggregates.get(2), Alias.class); var max = as(alias.child(), Max.class); } @@ -1193,9 +1200,53 @@ public void testAggsWithOverridingInputAndGrouping() throws Exception { var limit = as(plan, Limit.class); var order = as(limit.child(), OrderBy.class); var agg = as(order.child(), Aggregate.class); - var aggregates = agg.aggregates(); - assertThat(aggregates, hasSize(1)); - assertThat(Expressions.names(aggregates), contains("b")); + var output = agg.output(); + assertThat(output, hasSize(1)); + assertThat(Expressions.names(output), contains("b")); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_EsqlAggregate[[emp_no{f}#9 + languages{f}#12 AS emp_no + languages],[MIN(emp_no{f}#9 + languages{f}#12) AS min(emp_no + langu + * ages), emp_no + languages{r}#7]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] 
+ */ + public void testAggsOverGroupingKey() throws Exception { + var plan = analyze(""" + from test + | stats min(emp_no + languages) by emp_no + languages + """); + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var output = agg.output(); + assertThat(output, hasSize(2)); + var aggs = agg.aggregates(); + var min = as(Alias.unwrap(aggs.get(0)), Min.class); + assertThat(min.arguments(), hasSize(1)); + var group = Alias.unwrap(agg.groupings().get(0)); + assertEquals(min.arguments().get(0), group); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_EsqlAggregate[[emp_no{f}#9 + languages{f}#12 AS a],[MIN(a{r}#7) AS min(a), a{r}#7]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testAggsOverGroupingKeyWithAlias() throws Exception { + var plan = analyze(""" + from test + | stats min(a) by a = emp_no + languages + """); + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var output = agg.output(); + assertThat(output, hasSize(2)); + var aggs = agg.aggregates(); + var min = as(Alias.unwrap(aggs.get(0)), Min.class); + assertThat(min.arguments(), hasSize(1)); + assertEquals(Expressions.attribute(min.arguments().get(0)), Expressions.attribute(agg.groupings().get(0))); } public void testAggsWithoutAgg() throws Exception { @@ -1708,7 +1759,7 @@ public void testFoldableInGrouping() { |stats x by 1 """)); - assertThat(e.getMessage(), containsString("[x] is not an aggregate function")); + assertThat(e.getMessage(), containsString("Unknown column [x]")); } public void testScalarFunctionsInStats() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 866a4c458c424..d5d82207a770e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -67,14 +67,6 @@ public void testAggsExpressionsInStatsAggs() { "1:44: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error("from test | eval z = 2 | stats x = avg(z), salary by emp_no") ); - assertEquals( - "1:26: scalar functions over groupings [first_name] not allowed yet", - error("from test | stats length(first_name), count(1) by first_name") - ); - assertEquals( - "1:36: scalar functions over groupings [languages] not allowed yet", - error("from test | stats max(languages) + languages by l = languages") - ); assertEquals( "1:23: nested aggregations [max(salary)] not allowed inside other aggregations [max(max(salary))]", error("from test | stats max(max(salary)) by first_name") @@ -91,7 +83,35 @@ public void testAggsExpressionsInStatsAggs() { "1:23: second argument of [count_distinct(languages, languages)] must be a constant, received [languages]", error("from test | stats x = count_distinct(languages, languages) by emp_no") ); + // no agg function + assertEquals("1:19: expected an aggregate function but found [5]", error("from test | stats 5 by emp_no")); + + // don't allow naked group + assertEquals("1:19: grouping key [emp_no] already specified in the STATS BY clause", error("from test | stats emp_no BY emp_no")); + // don't allow naked group - even when it's an expression + assertEquals( + "1:19: grouping key [languages + emp_no] already specified in the STATS BY clause", + error("from test | stats languages + 
emp_no BY languages + emp_no") + ); + // don't allow group alias + assertEquals( + "1:19: grouping key [e] already specified in the STATS BY clause", + error("from test | stats e BY e = languages + emp_no") + ); + var message = error("from test | stats languages + emp_no BY e = languages + emp_no"); + assertThat( + message, + containsString( + "column [emp_no] cannot be used as an aggregate once declared in the STATS BY grouping key [e = languages + emp_no]" + ) + ); + assertThat( + message, + containsString( + " column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [e = languages + emp_no]" + ) + ); } public void testAggsInsideGrouping() { @@ -103,16 +123,37 @@ public void testAggsInsideGrouping() { public void testAggsWithInvalidGrouping() { assertEquals( - "1:35: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + "1:35: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", error("from test| stats max(languages) + languages by l = languages % 3") ); } + public void testGroupingAlias() throws Exception { + assertEquals( + "1:23: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", + error("from test | stats l = languages + 3 by l = languages % 3 | keep l") + ); + } + + public void testGroupingAliasDuplicate() throws Exception { + assertEquals( + "1:22: column [languages] cannot be used as an aggregate " + + "once declared in the STATS BY grouping key [l = languages % 3, l = languages, l = languages % 2]", + error("from test| stats l = languages + 3 by l = languages % 3, l = languages, l = languages % 2 | keep l") + ); + + assertEquals( + "1:22: column [languages] cannot be used as an aggregate " + "once declared in the STATS BY grouping key [l = languages % 3]", + error("from test| stats l = languages + 3, l = languages % 2 by l = languages % 3 | keep l") + ); + + } + public void testAggsIgnoreCanonicalGrouping() { // the grouping column should appear verbatim - ignore canonical representation as they complicate things significantly // for no real benefit (1+languages != languages + 1) assertEquals( - "1:39: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + "1:39: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages + 1]", error("from test| stats max(languages) + 1 + languages by l = languages + 1") ); } @@ -129,20 +170,6 @@ public void testAggsInsideEval() throws Exception { assertEquals("1:29: aggregate function [max(b)] not allowed outside STATS command", error("row a = 1, b = 2 | eval x = max(b)")); } - public void testAggsWithExpressionOverAggs() { - assertEquals( - "1:44: scalar functions over groupings [languages] not allowed yet", - error("from test | stats max(languages + 1) , m = languages + min(salary + 1) by l = languages, s = salary") - ); - } - - public void testAggScalarOverGroupingColumn() { - assertEquals( - "1:26: scalar functions over groupings [first_name] not allowed yet", - error("from test | stats length(first_name), count(1) by first_name") - ); - } - public void testGroupingInAggs() { assertEquals("2:12: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error(""" from test @@ -352,8 +379,11 @@ public void testNestedAggField() { assertEquals("1:27: Unknown column [avg]", error("from test | stats c = avg(avg)")); } - public void 
testUnfinishedAggFunction() { - assertEquals("1:23: invalid stats declaration; [avg] is not an aggregate function", error("from test | stats c = avg")); + public void testNotFoundFieldInNestedFunction() { + assertEquals(""" + 1:30: Unknown column [missing] + line 1:43: Unknown column [not_found] + line 1:23: Unknown column [avg]""", error("from test | stats c = avg by missing + 1, not_found")); } public void testSpatialSort() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 050ee2caefec0..b2f7690108900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -365,6 +366,51 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Expects + * TopN[[Order[x{r}#10,ASC,LAST]],1000[INTEGER]] + * \_Aggregate[[languages{f}#16],[MAX(emp_no{f}#13) AS x, languages{f}#16]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] + */ + public void testRemoveOverridesInAggregate() throws Exception { + var plan = plan(""" + from test + | stats x = count(emp_no), x = min(emp_no), x = max(emp_no) by languages + | sort x + """); + + var topN = as(plan, TopN.class); + var agg = as(topN.child(), Aggregate.class); + var aggregates = agg.aggregates(); + assertThat(aggregates, hasSize(2)); + assertThat(Expressions.names(aggregates), contains("x", "languages")); + var alias = as(aggregates.get(0), Alias.class); + var max = as(alias.child(), Max.class); + assertThat(Expressions.name(max.arguments().get(0)), equalTo("emp_no")); + } + + // expected stats b by b (grouping overrides the rest of the aggs) + + /** + * Expects + * TopN[[Order[b{r}#10,ASC,LAST]],1000[INTEGER]] + * \_Aggregate[[b{r}#10],[languages{f}#16 AS b]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] 
+ */ + public void testAggsWithOverridingInputAndGrouping() throws Exception { + var plan = plan(""" + from test + | stats b = count(emp_no), b = max(emp_no) by b = languages + | sort b + """); + + var topN = as(plan, TopN.class); + var agg = as(topN.child(), Aggregate.class); + var aggregates = agg.aggregates(); + assertThat(aggregates, hasSize(1)); + assertThat(Expressions.names(aggregates), contains("b")); + } + /** * Project[[s{r}#4 AS d, s{r}#4, last_name{f}#21, first_name{f}#18]] * \_Limit[1000[INTEGER]] @@ -3074,6 +3120,115 @@ public void testNestedMultiExpressionsInGroupingAndAggs() { assertThat(Expressions.names(agg.output()), contains("count(salary + 1)", "max(salary + 23)", "languages + 1", "emp_no % 3")); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[g{r}#8],[COUNT($$emp_no_%_2_+_la>$COUNT$0{r}#20) AS c, g{r}#8]] + * \_Eval[[emp_no{f}#10 % 2[INTEGER] AS g, languages{f}#13 + emp_no{f}#10 % 2[INTEGER] AS $$emp_no_%_2_+_la>$COUNT$0]] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] + */ + public void testNestedExpressionsWithGroupingKeyInAggs() { + var plan = optimizedPlan(""" + from test + | stats c = count(languages + emp_no % 2) by g = emp_no % 2 + """); + + var limit = as(plan, Limit.class); + var aggregate = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(aggregate.aggregates()), contains("c", "g")); + assertThat(Expressions.names(aggregate.groupings()), contains("g")); + var eval = as(aggregate.child(), Eval.class); + var fields = eval.fields(); + // emp_no % 2 + var value = Alias.unwrap(fields.get(0)); + var math = as(value, Mod.class); + assertThat(Expressions.name(math.left()), is("emp_no")); + assertThat(math.right().fold(), is(2)); + // languages + emp_no % 2 + var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); + if (add.left() instanceof Mod mod) { + add = add.swapLeftAndRight(); + } + assertThat(Expressions.name(add.left()), is("languages")); + var mod = as(add.right().canonical(), Mod.class); + assertThat(Expressions.name(mod.left()), is("emp_no")); + assertThat(mod.right().fold(), is(2)); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[emp_no % 2{r}#12, languages + salary{r}#15],[MAX(languages + salary{r}#15) AS m, COUNT($$languages_+_sal>$COUN + * T$0{r}#28) AS c, emp_no % 2{r}#12, languages + salary{r}#15]] + * \_Eval[[emp_no{f}#18 % 2[INTEGER] AS emp_no % 2, languages{f}#21 + salary{f}#23 AS languages + salary, languages{f}#2 + * 1 + salary{f}#23 + emp_no{f}#18 % 2[INTEGER] AS $$languages_+_sal>$COUNT$0]] + * \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] 
+ */ + public void testNestedExpressionsWithMultiGrouping() { + var plan = optimizedPlan(""" + from test + | stats m = max(languages + salary), c = count(languages + salary + emp_no % 2) by emp_no % 2, languages + salary + """); + + var limit = as(plan, Limit.class); + var aggregate = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(aggregate.aggregates()), contains("m", "c", "emp_no % 2", "languages + salary")); + assertThat(Expressions.names(aggregate.groupings()), contains("emp_no % 2", "languages + salary")); + var eval = as(aggregate.child(), Eval.class); + var fields = eval.fields(); + // emp_no % 2 + var value = Alias.unwrap(fields.get(0).canonical()); + var math = as(value, Mod.class); + assertThat(Expressions.name(math.left()), is("emp_no")); + assertThat(math.right().fold(), is(2)); + // languages + salary + var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); + assertThat(Expressions.name(add.left()), anyOf(is("languages"), is("salary"))); + assertThat(Expressions.name(add.right()), anyOf(is("salary"), is("languages"))); + // languages + salary + emp_no % 2 + var add2 = as(Alias.unwrap(fields.get(2).canonical()), Add.class); + if (add2.left() instanceof Mod mod) { + add2 = add2.swapLeftAndRight(); + } + var add3 = as(add2.left().canonical(), Add.class); + var mod = as(add2.right().canonical(), Mod.class); + // languages + salary + assertThat(Expressions.name(add3.left()), anyOf(is("languages"), is("salary"))); + assertThat(Expressions.name(add3.right()), anyOf(is("salary"), is("languages"))); + // emp_no % 2 + assertThat(Expressions.name(mod.left()), is("emp_no")); + assertThat(mod.right().fold(), is(2)); + } + + /** + * Expects + * Project[[e{r}#5, languages + emp_no{r}#8]] + * \_Eval[[$$MAX$max(languages_+>$0{r}#20 + 1[INTEGER] AS e]] + * \_Limit[1000[INTEGER]] + * \_Aggregate[[languages + emp_no{r}#8],[MAX(emp_no{f}#10 + languages{f}#13) AS $$MAX$max(languages_+>$0, languages + emp_no{ + * r}#8]] + * \_Eval[[languages{f}#13 + emp_no{f}#10 AS languages + emp_no]] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] 
+ */ + public void testNestedExpressionsInStatsWithExpression() { + var plan = optimizedPlan(""" + from test + | stats e = max(languages + emp_no) + 1 by languages + emp_no + """); + + var project = as(plan, Project.class); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.names(fields), contains("e")); + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var groupings = agg.groupings(); + assertThat(Expressions.names(groupings), contains("languages + emp_no")); + eval = as(agg.child(), Eval.class); + fields = eval.fields(); + assertThat(Expressions.names(fields), contains("languages + emp_no")); + } + public void testLogicalPlanOptimizerVerifier() { var plan = plan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index f7bb90208af3f..49fb3af5384b4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; @@ -40,7 +42,6 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -242,7 +243,7 @@ public void testEvalImplicitNames() { public void testStatsWithGroups() { assertEquals( - new Aggregate( + new EsqlAggregate( EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("c"), attribute("d.e")), @@ -258,7 +259,7 @@ public void testStatsWithGroups() { public void testStatsWithoutGroups() { assertEquals( - new Aggregate( + new EsqlAggregate( EMPTY, PROCESSING_CMD_INPUT, List.of(), @@ -273,7 +274,7 @@ public void testStatsWithoutGroups() { public void testStatsWithoutAggs() throws Exception { assertEquals( - new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("a")), List.of(attribute("a"))), + new EsqlAggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("a")), List.of(attribute("a"))), processingCommand("stats by a") ); } @@ -299,7 +300,7 @@ public void testAggsWithGroupKeyAsAgg() throws Exception { """ }; for (String query : queries) { - expectError(query, 
"Cannot specify grouping expression [a] as an aggregate"); + expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); } } @@ -1105,6 +1106,11 @@ private void expectError(String query, String errorMessage) { assertThat(e.getMessage(), containsString(errorMessage)); } + private void expectVerificationError(String query, String errorMessage) { + VerificationException e = expectThrows(VerificationException.class, "Expected syntax error for " + query, () -> statement(query)); + assertThat(e.getMessage(), containsString(errorMessage)); + } + private void expectError(String query, List params, String errorMessage) { ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query, params)); assertThat(e.getMessage(), containsString(errorMessage)); From c2bd3e40746ef715144084b2d8f6ba9cf19eb4fd Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 10:59:24 -0700 Subject: [PATCH 128/264] ESQL: Fix flaky test in LogicalPlanOptimizerTests --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index b2f7690108900..95843b954ef91 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3190,8 +3190,8 @@ public void testNestedExpressionsWithMultiGrouping() { if (add2.left() instanceof Mod mod) { add2 = add2.swapLeftAndRight(); } - var add3 = as(add2.left().canonical(), Add.class); - var mod = as(add2.right().canonical(), Mod.class); + var add3 = as(add2.left(), Add.class); + var mod = as(add2.right(), Mod.class); // languages + salary assertThat(Expressions.name(add3.left()), anyOf(is("languages"), is("salary"))); assertThat(Expressions.name(add3.right()), anyOf(is("salary"), is("languages"))); From a5e7525d817a77a0ae0c91d120cc0d3f2dbf2dd4 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 11:22:28 -0700 Subject: [PATCH 129/264] ESQL: Disable flaky test --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 95843b954ef91..63c2a33543073 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3164,6 +3164,7 @@ public void testNestedExpressionsWithGroupingKeyInAggs() { * 1 + salary{f}#23 + emp_no{f}#18 % 2[INTEGER] AS $$languages_+_sal>$COUNT$0]] * \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] 
*/ + @AwaitsFix(bugUrl = "disabled since canonical representation relies on hashing which is runtime defined") public void testNestedExpressionsWithMultiGrouping() { var plan = optimizedPlan(""" from test From 3486a0815ef368e2475ed6c97cdff2145f5fec4c Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 4 Apr 2024 21:01:06 +0200 Subject: [PATCH 130/264] Remove unused methods from SearchContext (#107111) A couple of methods can be removed here, as they're only used in subclasses. That also cleans up some UnsupportedOperationException code that is never hit. --- .../search/DefaultSearchContext.java | 5 ---- .../internal/FilteredSearchContext.java | 25 ------------------- .../search/internal/SearchContext.java | 10 -------- .../search/internal/SubSearchContext.java | 19 -------------- .../search/rank/RankSearchContext.java | 25 ------------------- .../elasticsearch/test/TestSearchContext.java | 19 -------------- 6 files changed, 103 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 0e6800b9c8d48..49ab7bf74ca91 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -481,7 +481,6 @@ public SearchContext aggregations(SearchContextAggregations aggregations) { return this; } - @Override public void addSearchExt(SearchExtBuilder searchExtBuilder) { // it's ok to use the writeable name here given that we enforce it to be the same as the name of the element that gets // parsed by the corresponding parser. There is one single name and one single way to retrieve the parsed object from the context. @@ -508,7 +507,6 @@ public SuggestionSearchContext suggest() { return suggest; } - @Override public void suggest(SuggestionSearchContext suggest) { this.suggest = suggest; } @@ -613,7 +611,6 @@ public TimeValue timeout() { return timeout; } - @Override public void timeout(TimeValue timeout) { this.timeout = timeout; } @@ -688,7 +685,6 @@ public FieldDoc searchAfter() { return searchAfter; } - @Override public SearchContext collapse(CollapseContext collapse) { this.collapse = collapse; return this; } @@ -786,7 +782,6 @@ public List groupStats() { return this.groupStats; } - @Override public void groupStats(List groupStats) { this.groupStats = groupStats; } diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index c02a959231a61..7e54eeacffd7d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -139,11 +139,6 @@ public SuggestionSearchContext suggest() { return in.suggest(); } - @Override - public void suggest(SuggestionSearchContext suggest) { - in.suggest(suggest); - } - @Override public RankShardContext rankShardContext() { return in.rankShardContext(); } @@ -204,11 +199,6 @@ public TimeValue timeout() { return in.timeout(); } - @Override - public void timeout(TimeValue timeout) { - in.timeout(timeout); - } - @Override public int terminateAfter() { return in.terminateAfter(); } @@ -334,11 +324,6 @@ public List groupStats() { return in.groupStats(); } - @Override - public void groupStats(List groupStats) { - in.groupStats(groupStats); - } - @Override public boolean version() { return in.version(); } @@ -409,11 +394,6 @@ public long 
getRelativeTimeInMillis() { return in.getRelativeTimeInMillis(); } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - in.addSearchExt(searchExtBuilder); - } - @Override public SearchExtBuilder getSearchExt(String name) { return in.getSearchExt(name); @@ -444,11 +424,6 @@ public boolean isCancelled() { return in.isCancelled(); } - @Override - public SearchContext collapse(CollapseContext collapse) { - return in.collapse(collapse); - } - @Override public CollapseContext collapse() { return in.collapse(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 512df4d15dcb0..9580c450fd921 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -122,8 +122,6 @@ public final void close() { public abstract SearchContext aggregations(SearchContextAggregations aggregations); - public abstract void addSearchExt(SearchExtBuilder searchExtBuilder); - public abstract SearchExtBuilder getSearchExt(String name); public abstract SearchHighlightContext highlight(); @@ -139,8 +137,6 @@ public InnerHitsContext innerHits() { public abstract SuggestionSearchContext suggest(); - public abstract void suggest(SuggestionSearchContext suggest); - public abstract RankShardContext rankShardContext(); public abstract void rankShardContext(RankShardContext rankShardContext); @@ -217,8 +213,6 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract TimeValue timeout(); - public abstract void timeout(TimeValue timeout); - public abstract int terminateAfter(); public abstract void terminateAfter(int terminateAfter); @@ -255,8 +249,6 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract FieldDoc searchAfter(); - public abstract SearchContext collapse(CollapseContext collapse); - public abstract CollapseContext collapse(); public abstract SearchContext parsedPostFilter(ParsedQuery postFilter); @@ -310,8 +302,6 @@ public Query rewrittenQuery() { @Nullable public abstract List groupStats(); - public abstract void groupStats(List groupStats); - public abstract boolean version(); public abstract void version(boolean version); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index f31b319882b5a..91cd647be673d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.collapse.CollapseContext; @@ -22,9 +21,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.sort.SortAndFormats; -import org.elasticsearch.search.suggest.SuggestionSearchContext; - -import java.util.List; public class SubSearchContext extends FilteredSearchContext { @@ -104,11 +100,6 @@ public void highlight(SearchHighlightContext highlight) { this.highlight = highlight; } - @Override - public void 
suggest(SuggestionSearchContext suggest) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public boolean hasScriptFields() { return scriptFields != null && scriptFields.fields().isEmpty() == false; @@ -160,11 +151,6 @@ public SubSearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext return this; } - @Override - public void timeout(TimeValue timeout) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public void terminateAfter(int terminateAfter) { throw new UnsupportedOperationException("Not supported"); @@ -269,11 +255,6 @@ public void explain(boolean explain) { this.explain = explain; } - @Override - public void groupStats(List groupStats) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public boolean version() { return version; diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java index 86f7566683d21..d144e45becc12 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java @@ -257,11 +257,6 @@ public SearchContext aggregations(SearchContextAggregations aggregations) { throw new UnsupportedOperationException(); } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - throw new UnsupportedOperationException(); - } - @Override public SearchExtBuilder getSearchExt(String name) { throw new UnsupportedOperationException(); @@ -287,11 +282,6 @@ public SuggestionSearchContext suggest() { throw new UnsupportedOperationException(); } - @Override - public void suggest(SuggestionSearchContext suggest) { - throw new UnsupportedOperationException(); - } - @Override public RankShardContext rankShardContext() { throw new UnsupportedOperationException(); @@ -357,11 +347,6 @@ public BitsetFilterCache bitsetFilterCache() { throw new UnsupportedOperationException(); } - @Override - public void timeout(TimeValue timeout) { - throw new UnsupportedOperationException(); - } - @Override public void terminateAfter(int terminateAfter) { throw new UnsupportedOperationException(); @@ -397,11 +382,6 @@ public SearchContext searchAfter(FieldDoc searchAfter) { throw new UnsupportedOperationException(); } - @Override - public SearchContext collapse(CollapseContext collapse) { - throw new UnsupportedOperationException(); - } - @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { throw new UnsupportedOperationException(); @@ -457,11 +437,6 @@ public List groupStats() { throw new UnsupportedOperationException(); } - @Override - public void groupStats(List groupStats) { - throw new UnsupportedOperationException(); - } - @Override public boolean version() { throw new UnsupportedOperationException(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 80d1b82fbfcfe..8db968b59ae1f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -171,11 +171,6 @@ public SearchContext aggregations(SearchContextAggregations searchContextAggrega return this; } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder); - } - @Override public SearchExtBuilder 
getSearchExt(String name) { return searchExtBuilders.get(name); @@ -194,9 +189,6 @@ public SuggestionSearchContext suggest() { return null; } - @Override - public void suggest(SuggestionSearchContext suggest) {} - @Override public List rescore() { return Collections.emptyList(); @@ -267,9 +259,6 @@ public TimeValue timeout() { return TimeValue.ZERO; } - @Override - public void timeout(TimeValue timeout) {} - @Override public int terminateAfter() { return terminateAfter; @@ -340,11 +329,6 @@ public FieldDoc searchAfter() { return searchAfter; } - @Override - public SearchContext collapse(CollapseContext collapse) { - return null; - } - @Override public CollapseContext collapse() { return null; @@ -431,9 +415,6 @@ public List groupStats() { return null; } - @Override - public void groupStats(List groupStats) {} - @Override public boolean version() { return false; From b3b4214e47729d007f350b08fd6749e627ea6247 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 4 Apr 2024 13:25:11 -0700 Subject: [PATCH 131/264] Skip multi-release source sets in idea project import by default (#107123) There is an existing IntelliJ bug that prevents doing a full project build when source sets for multi-release jars are present. This changes the project import behavior so that these source sets are ignored by default and can be explicitly enabled by adding `org.gradle.mrjar.idea.enabled=true` to your `~/.gradle/gradle.properties` file should you need to actively work on that code. --- .../src/main/groovy/elasticsearch.ide.gradle | 27 ++++++------ .../gradle/internal/MrjarPlugin.java | 42 +++++++++++-------- 2 files changed, 40 insertions(+), 29 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index ccbe9cd2f4a2b..6cb22dad9bc79 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -130,7 +130,8 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { ':server:generateModulesList', ':server:generatePluginsList', ':generateProviderImpls', - ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs'].collect { elasticsearchProject.right()?.task(it) ?: it }) + ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs', + ':x-pack:libs:es-opensaml-security-api:shadowJar'].collect { elasticsearchProject.right()?.task(it) ?: it }) } // this path is produced by the extractLibs task above @@ -239,20 +240,22 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { * but before the XML document, e.g. 
a doctype or comment */ void modifyXml(Object path, Action action, String preface = null) { - Node xml = parseXml(path) - action.execute(xml) + if (project.file(path).exists()) { + Node xml = parseXml(path) + action.execute(xml) - File xmlFile = project.file(path) - xmlFile.withPrintWriter { writer -> - def printer = new XmlNodePrinter(writer) - printer.namespaceAware = true - printer.preserveWhitespace = true - writer.write("\n") + File xmlFile = project.file(path) + xmlFile.withPrintWriter { writer -> + def printer = new XmlNodePrinter(writer) + printer.namespaceAware = true + printer.preserveWhitespace = true + writer.write("\n") - if (preface != null) { - writer.write(preface) + if (preface != null) { + writer.write(preface) + } + printer.print(xml) } - printer.print(xml) } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 6524247c4c8f6..c64bd3cc9c068 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -49,6 +49,7 @@ public class MrjarPlugin implements Plugin { private static final Pattern MRJAR_SOURCESET_PATTERN = Pattern.compile("main(\\d{2})"); + private static final String MRJAR_IDEA_ENABLED = "org.gradle.mrjar.idea.enabled"; private final JavaToolchainService javaToolchains; @@ -61,23 +62,30 @@ public class MrjarPlugin implements Plugin { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - - List mainVersions = findSourceVersions(project); - List mainSourceSets = new ArrayList<>(); - mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); - List testSourceSets = new ArrayList<>(mainSourceSets); - testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); - for (int javaVersion : mainVersions) { - String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; - SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); - configureSourceSetInJar(project, mainSourceSet, javaVersion); - mainSourceSets.add(mainSourceSetName); - testSourceSets.add(mainSourceSetName); - - String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; - SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); - testSourceSets.add(testSourceSetName); - createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + var isIdea = System.getProperty("idea.active", "false").equals("true"); + var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); + + // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this. 
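In practical terms, the gate described in the comment above means a default IntelliJ project import sees only the plain main and test source sets. Anyone who needs to work on the multi-release code opts back in with the property named in the commit message; a minimal example, placed in the location the message itself suggests:

    # ~/.gradle/gradle.properties
    org.gradle.mrjar.idea.enabled=true

Outside of an IDEA import (when the idea.active system property is unset), the check is a no-op and the versioned source sets are registered as before.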
+ // Avoids an IntelliJ bug: + // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea + if (isIdea == false || ideaSourceSetsEnabled) { + List mainVersions = findSourceVersions(project); + List mainSourceSets = new ArrayList<>(); + mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List testSourceSets = new ArrayList<>(mainSourceSets); + testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); + for (int javaVersion : mainVersions) { + String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + configureSourceSetInJar(project, mainSourceSet, javaVersion); + mainSourceSets.add(mainSourceSetName); + testSourceSets.add(mainSourceSetName); + + String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + testSourceSets.add(testSourceSetName); + createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + } } configureMrjar(project); From ff8f75c6795c1608313125b3337cbac15058daea Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Apr 2024 03:25:25 +0200 Subject: [PATCH 132/264] Remove unused ShrinkAction (#107127) ShrinkAction isn't used anymore, so we can remove it and its trivial test coverage. --- .../admin/indices/shrink/ShrinkAction.java | 23 ------------------- .../xpack/core/ilm/OperationMode.java | 4 +--- .../authz/privilege/IndexPrivilegeTests.java | 2 -- .../ResizeRequestInterceptorTests.java | 9 +++----- 4 files changed, 4 insertions(+), 34 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java deleted file mode 100644 index 129c07b64fd4d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; - -public class ShrinkAction extends ActionType { - - public static final ShrinkAction INSTANCE = new ShrinkAction(); - public static final String NAME = "indices:admin/shrink"; - - private ShrinkAction() { - super(NAME); - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java index 9c07db9841e23..95a1bf8493e42 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; - /** * Enum representing the different modes that Index Lifecycle Service can operate in. */ @@ -24,7 +22,7 @@ public boolean isValidChange(OperationMode nextMode) { }, /** - * this represents a state where only sensitive actions (like {@link ShrinkAction}) will be executed + * this represents a state where only sensitive actions (like {@link ShrinkStep}) will be executed * until they finish, at which point the operation mode will move to STOPPED. */ STOPPING { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index b755d3497f649..b05f7065ff63c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -68,7 +67,6 @@ public void testFindPrivilegesThatGrant() { equalTo(List.of("monitor", "cross_cluster_replication", "manage", "all")) ); assertThat(findPrivilegesThatGrant(RefreshAction.NAME), equalTo(List.of("maintenance", "manage", "all"))); - assertThat(findPrivilegesThatGrant(ShrinkAction.NAME), equalTo(List.of("manage", "all"))); } public void testPrivilegesForRollupFieldCapsAction() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java index f52102ded442b..817d5739b4b9f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.shrink.ResizeAction; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import 
org.elasticsearch.action.admin.indices.shrink.ShrinkAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -78,7 +77,6 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { } else { queries = null; } - final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME); IndicesAccessControl accessControl = new IndicesAccessControl( true, Collections.singletonMap( @@ -94,7 +92,7 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), ResizeAction.NAME, null); AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; @@ -126,7 +124,6 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() .user(new User("john", "role")) .realmRef(new RealmRef("realm", "type", "node", null)) .build(); - final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME); IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.emptyMap()); new SecurityContext(Settings.EMPTY, threadContext).putIndicesAccessControl(accessControl); ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); @@ -134,7 +131,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); { PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), ResizeAction.NAME, null); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; listener.onResponse(AuthorizationResult.deny()); @@ -159,7 +156,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() // swap target and source for success { PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), ResizeAction.NAME, null); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; listener.onResponse(AuthorizationResult.granted()); From 952f07dcb70a7866afeeb80680315a34eef85621 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 21:28:24 -0700 Subject: [PATCH 133/264] Mute TopNFunctionTests#testToXContent (#107132) --- .../org/elasticsearch/xpack/profiling/TopNFunctionTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java 
b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java index afbbe24979466..3a91550767094 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java @@ -21,6 +21,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class TopNFunctionTests extends ESTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107117") public void testToXContent() throws IOException { String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; int frameType = 1; From ab19b60a730e9803c05eecf1fd0cba55fee1a9ca Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 5 Apr 2024 11:12:54 +0300 Subject: [PATCH 134/264] Add check for lossy params in source (#107097) Lossy params may cause issues with reindexing data under the hood. Checking for lossy params is gated behind an extra setting that is not currently available. --- .../index/mapper/SourceFieldMapper.java | 41 ++++++++++++---- .../mapper/DynamicFieldsBuilderTests.java | 3 +- .../index/mapper/SourceFieldMapperTests.java | 47 +++++++++++++++++++ .../query/SearchExecutionContextTests.java | 2 +- 4 files changed, 83 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 15770785e11f9..4a6eaa5b26c39 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; @@ -28,6 +29,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -39,6 +41,8 @@ public class SourceFieldMapper extends MetadataFieldMapper { public static final String CONTENT_TYPE = "_source"; + public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters"; + /** The source mode */ private enum Mode { DISABLED, @@ -128,9 +132,12 @@ public static class Builder extends MetadataFieldMapper.Builder { private final IndexMode indexMode; - public Builder(IndexMode indexMode) { + private final boolean supportsNonDefaultParameterValues; + + public Builder(IndexMode indexMode, final Settings settings) { super(Defaults.NAME); this.indexMode = indexMode; + this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -145,13 +152,10 @@ protected Parameter[] getParameters() { private boolean isDefault() { Mode m = mode.get(); - if (m != null && (indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) { + if (m != null && (((indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) { return false; } - if (enabled.get().value() == false) { - return false; - } - return includes.getValue().isEmpty() && 
excludes.getValue().isEmpty(); + return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); } @Override @@ -167,6 +171,27 @@ public SourceFieldMapper build() { if (isDefault()) { return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : DEFAULT; } + if (supportsNonDefaultParameterValues == false) { + List disallowed = new ArrayList<>(); + if (enabled.get().value() == false) { + disallowed.add("enabled"); + } + if (includes.get().isEmpty() == false) { + disallowed.add("includes"); + } + if (excludes.get().isEmpty() == false) { + disallowed.add("excludes"); + } + if (mode.get() == Mode.DISABLED) { + disallowed.add("mode=disabled"); + } + assert disallowed.isEmpty() == false; + throw new MapperParsingException( + disallowed.size() == 1 + ? "Parameter [" + disallowed.get(0) + "] is not allowed in source" + : "Parameters [" + String.join(",", disallowed) + "] are not allowed in source" + ); + } SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( mode.get(), enabled.get(), @@ -186,7 +211,7 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode()) + c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) ); static final class SourceFieldType extends MappedFieldType { @@ -321,7 +346,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode).init(this); + return new Builder(indexMode, Settings.EMPTY).init(this); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 329d8a795732f..229e2e6f72cc1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -67,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 5601290fed5c7..47b8bb3be36b7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -11,6 +11,7 @@ import 
org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -238,4 +239,50 @@ public void testSyntheticSourceInTimeSeries() throws IOException { assertTrue(mapper.sourceMapper().isSynthetic()); assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } + + public void testSupportsNonDefaultParameterValues() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("enabled", false).endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [enabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("includes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [includes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [excludes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [mode=disabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService( + settings, + topMapping( + b -> b.startObject("_source").field("enabled", false).array("includes", "foo").array("excludes", "foo").endObject() + ) + ).documentMapper().sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 2f31bac135716..3085ff89603ce 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); From ddb1b7463fa8cf773ab403895f05b9776992e256 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 5 
Apr 2024 10:27:54 +0200 Subject: [PATCH 135/264] Small adjustments to metadata fields fetching (#107071) While looking at #106325, which moves fetching of metadata fields out of the StoredFieldsPhase, I noticed some small adjustments that we can make to FieldsVisitor, CustomFieldsVisitor and StoredFieldsPhase. These are not functional changes; the only goal is to make things simpler and clearer, hopefully. - add test coverage for the situation where _routing is provided with docs, hence returned by default - make a stronger connection between CustomFieldsVisitor and FieldsVisitor around fields that are treated differently (_ignored, _routing, _id and _source) - explicitly exclude _id from StoredFieldsPhase like we already do for _source as it's retrieved separately - move the _source exclusion in StoredFieldsPhase to after calling getMatchingFieldNames, so that patterns that match _source still exclude it --- .../search/source/MetadataFetchingIT.java | 40 +++++++++++++++++++ .../fieldvisitor/CustomFieldsVisitor.java | 17 ++++---- .../index/fieldvisitor/FieldsVisitor.java | 26 +++++------- .../fetch/subphase/StoredFieldsPhase.java | 22 +++++----- 4 files changed, 71 insertions(+), 34 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 7d902cf140839..b8d1d45a6f85d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.source; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.ValidationException; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; @@ -81,6 +82,11 @@ public void testWithRouting() { prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); + assertResponse(prepareSearch("test"), response -> { + assertThat(response.getHits().getAt(0).getId(), notNullValue()); + assertThat(response.getHits().getAt(0).field("_routing"), notNullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); + }); assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); @@ -90,6 +96,40 @@ public void testWithRouting() { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); + 
assertResponse(prepareSearch("test").storedFields("_none_"), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).field("_ignored"), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); + + { + GetResponse getResponse = client().prepareGet("test", "1").get(); + assertTrue(getResponse.isExists()); + assertThat(getResponse.getField("_ignored"), nullValue()); + } + { + GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_ignored").get(); + assertTrue(getResponse.isExists()); + assertEquals("ip", getResponse.getField("_ignored").getValue()); + } } public void testInvalid() { diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index d7f6e3541838b..92f74615711f1 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -8,26 +8,27 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; import java.util.HashSet; -import java.util.List; import java.util.Set; /** - * A field visitor that allows to load a selection of the stored fields by exact name - * {@code _id} and {@code _routing} fields are always loaded. + * A field visitor that allows to load a selection of the stored fields by exact name. + * {@code _id}, {@code _routing}, and {@code _ignored} fields are always loaded. + * {@code _source} is always loaded unless disabled explicitly. */ public class CustomFieldsVisitor extends FieldsVisitor { - private final Set fields; public CustomFieldsVisitor(Set fields, boolean loadSource) { super(loadSource); this.fields = new HashSet<>(fields); - // metadata fields are already handled by FieldsVisitor, so removing - // them here means that if the only fields requested are metadata - // fields then we can shortcut loading - List.of("_id", "_routing", "_source").forEach(this.fields::remove); + // metadata fields that are always retrieved are already handled by FieldsVisitor, so removing + // them here means that if the only fields requested are those metadata fields then we can shortcut loading + FieldsVisitor.BASE_REQUIRED_FIELDS.forEach(this.fields::remove); + this.fields.remove(this.sourceFieldName); + this.fields.remove(IgnoredFieldMapper.NAME); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 4789dcc131b89..bf4ad150b1ee4 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -34,10 +34,10 @@ * Base {@link StoredFieldVisitor} that retrieves all non-redundant metadata. 
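The CustomFieldsVisitor change above is what enables the shortcut mentioned in its new comment: once the always-retrieved fields are pruned from the requested set, an empty set means field loading can stop early. A compact, runnable sketch of that pruning, using plain strings in place of the mapper constants (per the FieldsVisitor diff below, BASE_REQUIRED_FIELDS covers _id and _routing):

    import java.util.HashSet;
    import java.util.Set;

    class ShortcutSketch {
        public static void main(String[] args) {
            // Requesting only always-loaded metadata fields leaves nothing to visit explicitly.
            Set<String> fields = new HashSet<>(Set.of("_id", "_routing", "_ignored", "_source"));
            Set.of("_id", "_routing").forEach(fields::remove); // FieldsVisitor.BASE_REQUIRED_FIELDS
            fields.remove("_source");   // sourceFieldName
            fields.remove("_ignored");  // IgnoredFieldMapper.NAME
            System.out.println(fields.isEmpty()); // true -> needsField(...) can return Status.STOP early
        }
    }

Note that _ignored cannot simply be folded into BASE_REQUIRED_FIELDS like the single-valued fields: as the FieldsVisitor comment below explains, it is multi-valued, so the visitor handles it separately, which is why the constructor removes it from the custom set rather than relying on the required-fields bookkeeping.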
*/ public class FieldsVisitor extends FieldNamesProvidingStoredFieldsVisitor { - private static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); + static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); private final boolean loadSource; - private final String sourceFieldName; + final String sourceFieldName; private final Set requiredFields; protected BytesReference source; protected String id; @@ -63,6 +63,7 @@ public Status needsField(FieldInfo fieldInfo) { // Always load _ignored to be explicit about ignored fields // This works because _ignored is added as the first metadata mapper, // so its stored fields always appear first in the list. + // Note that _ignored is also multi-valued, which is why it can't be removed from the set like other fields if (IgnoredFieldMapper.NAME.equals(fieldInfo.name)) { return Status.YES; } @@ -72,8 +73,7 @@ public Status needsField(FieldInfo fieldInfo) { return Status.YES; } } - // All these fields are single-valued so we can stop when the set is - // empty + // All these fields are single-valued so we can stop when the set is empty return requiredFields.isEmpty() ? Status.STOP : Status.NO; } @@ -100,7 +100,7 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) { binaryField(fieldInfo, new BytesRef(value)); } - public void binaryField(FieldInfo fieldInfo, BytesRef value) { + private void binaryField(FieldInfo fieldInfo, BytesRef value) { if (sourceFieldName.equals(fieldInfo.name)) { source = new BytesArray(value); } else if (IdFieldMapper.NAME.equals(fieldInfo.name)) { @@ -147,12 +147,6 @@ public void doubleField(FieldInfo fieldInfo, double value) { addValue(fieldInfo.name, value); } - public void objectField(FieldInfo fieldInfo, Object object) { - assert IdFieldMapper.NAME.equals(fieldInfo.name) == false : "_id field must go through binaryField"; - assert sourceFieldName.equals(fieldInfo.name) == false : "source field must go through binaryField"; - addValue(fieldInfo.name, object); - } - public BytesReference source() { return source; } @@ -178,7 +172,9 @@ public Map> fields() { } public void reset() { - if (fieldsValues != null) fieldsValues.clear(); + if (fieldsValues != null) { + fieldsValues.clear(); + } source = null; id = null; @@ -193,11 +189,7 @@ void addValue(String name, Object value) { fieldsValues = new HashMap<>(); } - List values = fieldsValues.get(name); - if (values == null) { - values = new ArrayList<>(2); - fieldsValues.put(name, values); - } + List values = fieldsValues.computeIfAbsent(name, k -> new ArrayList<>(2)); values.add(value); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java index d6950df962433..483285dba1fa7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -73,16 +74,19 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { if (storedFieldsContext.fieldNames() != null) { SearchExecutionContext sec = 
fetchContext.getSearchExecutionContext(); for (String field : storedFieldsContext.fieldNames()) { - if (SourceFieldMapper.NAME.equals(field) == false) { - Collection fieldNames = sec.getMatchingFieldNames(field); - for (String fieldName : fieldNames) { - MappedFieldType ft = sec.getFieldType(fieldName); - if (ft.isStored() == false) { - continue; - } - storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name()))); - fieldsToLoad.add(ft.name()); + Collection fieldNames = sec.getMatchingFieldNames(field); + for (String fieldName : fieldNames) { + // _id and _source are always retrieved anyway, no need to do it explicitly. See FieldsVisitor. + // They are not returned as part of HitContext#loadedFields hence they are not added to documents by this sub-phase + if (IdFieldMapper.NAME.equals(field) || SourceFieldMapper.NAME.equals(field)) { + continue; + } + MappedFieldType ft = sec.getFieldType(fieldName); + if (ft.isStored() == false) { + continue; } + storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name()))); + fieldsToLoad.add(ft.name()); } } } From 3f2070896543012a7ffbd6c83788699dd19fbd60 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Apr 2024 10:51:53 +0200 Subject: [PATCH 136/264] Remove unused NamedWriteableRegistry from search REST actions (#107126) We don't need `NamedWriteableRegistry` to parse search requests any longer; it was an unused parameter. Removing it from search request parsing allows for removing it as a dependency from a number of places. --- .../script/mustache/MustachePlugin.java | 2 +- .../mustache/RestSearchTemplateAction.java | 6 +--- .../RestSearchTemplateActionTests.java | 3 +- .../AbstractBaseReindexRestHandler.java | 14 +++------ .../AbstractBulkByQueryRestHandler.java | 11 +------ .../elasticsearch/reindex/ReindexPlugin.java | 6 ++-- .../reindex/RestDeleteByQueryAction.java | 11 +++---- .../reindex/RestReindexAction.java | 9 ++---- .../reindex/RestUpdateByQueryAction.java | 11 +++---- .../reindex/RestDeleteByQueryActionTests.java | 3 +- .../reindex/RestReindexActionTests.java | 14 +++------ .../reindex/RestUpdateByQueryActionTests.java | 3 +- .../elasticsearch/action/ActionModule.java | 6 ++-- .../action/search/RestMultiSearchAction.java | 29 ++----------------- .../rest/action/search/RestSearchAction.java | 27 +++-------------- .../search/MultiSearchRequestTests.java | 7 ++--- .../search/RestMultiSearchActionTests.java | 10 +------ .../action/search/RestSearchActionTests.java | 3 +- .../xpack/search/AsyncSearch.java | 2 +- .../search/RestSubmitAsyncSearchAction.java | 19 ++---------- .../RestSubmitAsyncSearchActionTests.java | 10 +------ .../org/elasticsearch/xpack/fleet/Fleet.java | 2 +- .../rest/RestFleetMultiSearchAction.java | 1 - .../fleet/rest/RestFleetSearchAction.java | 19 ++---------- .../elasticsearch/xpack/rollup/Rollup.java | 2 +- .../rollup/rest/RestRollupSearchAction.java | 6 +--- 26 files changed, 50 insertions(+), 186 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index c698a603055ad..8c1a410ee8a66 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -67,7 +67,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new 
RestSearchTemplateAction(namedWriteableRegistry, clusterSupportsFeature), + new RestSearchTemplateAction(clusterSupportsFeature), new RestMultiSearchTemplateAction(settings), new RestRenderSearchTemplateAction() ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index a29c10b7501f1..ab1f90bc59933 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -36,11 +35,9 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchTemplateAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { - this.namedWriteableRegistry = namedWriteableRegistry; + public RestSearchTemplateAction(Predicate clusterSupportsFeature) { this.clusterSupportsFeature = clusterSupportsFeature; } @@ -73,7 +70,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchRequest, request, null, - namedWriteableRegistry, clusterSupportsFeature, size -> searchRequest.source().size(size) ); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 1efa0ada221ef..1f1955e5ca171 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -28,7 +27,7 @@ public final class RestSearchTemplateActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestSearchTemplateAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java index 48c50450656f3..6643e2e9d20ea 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; @@ -39,15 +38,10 @@ protected AbstractBaseReindexRestHandler(A action) { this.action = action; } - protected RestChannelConsumer doPrepareRequest( - RestRequest request, - NamedWriteableRegistry namedWriteableRegistry, - NodeClient client, - boolean includeCreated, - boolean includeUpdated - ) throws IOException { + protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated) + throws IOException { // Build the internal request - Request internal = setCommonOptions(request, buildRequest(request, namedWriteableRegistry)); + Request internal = setCommonOptions(request, buildRequest(request)); // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { @@ -78,7 +72,7 @@ protected RestChannelConsumer doPrepareRequest( /** * Build the Request based on the RestRequest. */ - protected abstract Request buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException; + protected abstract Request buildRequest(RestRequest request) throws IOException; /** * Sets common options of {@link AbstractBulkByScrollRequest} requests. diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java index cb0d09f1f2450..8cf7d2200ad36 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; @@ -45,7 +44,6 @@ protected AbstractBulkByQueryRestHandler(A action) { protected void parseInternalRequest( Request internal, RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, Map> bodyConsumers ) throws IOException { @@ -58,14 +56,7 @@ protected void parseInternalRequest( IntConsumer sizeConsumer = restRequest.getRestApiVersion() == RestApiVersion.V_7 ? 
size -> setMaxDocsFromSearchSize(internal, size) : size -> failOnSizeSpecified(); - RestSearchAction.parseSearchRequest( - searchRequest, - restRequest, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - sizeConsumer - ); + RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, clusterSupportsFeature, sizeConsumer); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java index 8cdfc77db6f7f..1a40f77250e5f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java @@ -76,9 +76,9 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestReindexAction(namedWriteableRegistry, clusterSupportsFeature), - new RestUpdateByQueryAction(namedWriteableRegistry, clusterSupportsFeature), - new RestDeleteByQueryAction(namedWriteableRegistry, clusterSupportsFeature), + new RestReindexAction(clusterSupportsFeature), + new RestUpdateByQueryAction(clusterSupportsFeature), + new RestDeleteByQueryAction(clusterSupportsFeature), new RestRethrottleAction(nodesInCluster) ); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java index cc98dc06575b8..ff0ef1282b2d9 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.DeleteByQueryAction; @@ -31,12 +30,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestDeleteByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestDeleteByQueryAction(Predicate clusterSupportsFeature) { super(DeleteByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, false); + return doPrepareRequest(request, client, false, false); } @Override - protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException { /* * Passing the search request through DeleteByQueryRequest first allows * it to set its own defaults which differ from SearchRequest's @@ -74,7 +71,7 @@ protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("conflicts", o -> internal.setConflicts((String) o)); consumers.put("max_docs", s -> 
setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); return internal; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java index 253fd581cfceb..a693b0babaa9f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; @@ -34,12 +33,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestReindexAction extends AbstractBaseReindexRestHandler implements RestRequestFilter { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestReindexAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestReindexAction(Predicate clusterSupportsFeature) { super(ReindexAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -55,11 +52,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, true, true); + return doPrepareRequest(request, client, true, true); } @Override - protected ReindexRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected ReindexRequest buildRequest(RestRequest request) throws IOException { if (request.hasParam("pipeline")) { throw new IllegalArgumentException( "_reindex doesn't support [pipeline] as a query parameter. Specify it in the [dest] object instead." 
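The handler refactoring in this patch repeats the same shape for every action: the now-unused NamedWriteableRegistry collaborator is dropped from the buildRequest hook. As a rough, self-contained sketch of the resulting template-method contract (hypothetical names, not the actual Elasticsearch classes):

import java.io.IOException;

// Minimal sketch of the simplified contract: the base handler no longer
// threads a registry through to the subclass hook.
abstract class BaseHandlerSketch<R> {

    final void handle(String restRequest) throws IOException {
        // Before the change this was: buildRequest(restRequest, namedWriteableRegistry)
        R internal = buildRequest(restRequest);
        execute(internal);
    }

    // Subclasses parse the REST request into their internal request type.
    protected abstract R buildRequest(String restRequest) throws IOException;

    private void execute(R internal) {
        // ... dispatch the internal request and handle the response ...
    }
}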
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 50536a164727a..2a6146b9fad1c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.UpdateByQueryAction; @@ -32,12 +31,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestUpdateByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestUpdateByQueryAction(Predicate clusterSupportsFeature) { super(UpdateByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, true); + return doPrepareRequest(request, client, false, true); } @Override - protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException { if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { request.param("type"); } @@ -78,7 +75,7 @@ protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("script", o -> internal.setScript(Script.parse(o))); consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); internal.setPipeline(request.param("pipeline")); return internal; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index aa457fae9e377..013eace19f1b5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestDeleteByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestDeleteByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestDeleteByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> 
mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java index ddb8c2ce0225d..2e1810482bb5f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -21,10 +20,8 @@ import org.junit.Before; import java.io.IOException; -import java.util.Collections; import static java.util.Collections.singletonMap; -import static org.mockito.Mockito.mock; public class RestReindexActionTests extends RestActionTestCase { @@ -32,7 +29,7 @@ public class RestReindexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestReindexAction(mock(NamedWriteableRegistry.class), nf -> false); + action = new RestReindexAction(nf -> false); controller().registerHandler(action); } @@ -56,10 +53,7 @@ public void testPipelineQueryParameterIsError() throws IOException { request.withContent(BytesReference.bytes(body), body.contentType()); } request.withParams(singletonMap("pipeline", "doesn't matter")); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> action.buildRequest(request.build(), new NamedWriteableRegistry(Collections.emptyList())) - ); + Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build())); assertEquals("_reindex doesn't support [pipeline] as a query parameter. 
Specify it in the [dest] object instead.", e.getMessage()); } @@ -68,14 +62,14 @@ public void testSetScrollTimeout() throws IOException { { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT, request.getScrollTime()); } { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(singletonMap("scroll", "10m")); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals("10m", request.getScrollTime().toString()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index a3f468df89e1e..b83f11a91d1b8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestUpdateByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestUpdateByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestUpdateByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7e03b495438d8..cd01184801c64 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -933,14 +933,12 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestBulkAction(settings)); registerHandler.accept(new RestUpdateAction()); - registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature)); + registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature)); registerHandler.accept(new RestSearchScrollAction()); registerHandler.accept(new RestClearScrollAction()); registerHandler.accept(new RestOpenPointInTimeAction()); registerHandler.accept(new RestClosePointInTimeAction()); - registerHandler.accept( - new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature) - ); + registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), clusterSupportsFeature)); 
registerHandler.accept(new RestKnnSearchAction()); registerHandler.accept(new RestValidateQueryAction()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 69cc4f23f3956..0a7a4a9701a90 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; @@ -51,18 +50,11 @@ public class RestMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestMultiSearchAction( - Settings settings, - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -85,13 +77,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final MultiSearchRequest multiSearchRequest = parseRequest( - request, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature - ); + final MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature); return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); cancellableClient.execute( @@ -107,19 +93,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature ) throws IOException { - return parseRequest( - restRequest, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature, - (k, v, r) -> false - ); + return parseRequest(restRequest, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, (k, v, r) -> false); } /** @@ -128,7 +106,6 @@ public static MultiSearchRequest parseRequest( */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature, diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index cfb70da9fb454..3dbb98f7a7685 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java 
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; @@ -71,16 +70,10 @@ public class RestSearchAction extends BaseRestHandler { public static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -124,15 +117,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); return channel -> { @@ -148,7 +133,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter - * @param namedWriteableRegistry the registry of named writeables * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code udpate_by_query} and regular search differ here. */ @@ -156,11 +140,10 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize ) throws IOException { - parseSearchRequest(searchRequest, request, requestContentParser, namedWriteableRegistry, clusterSupportsFeature, setSize, null); + parseSearchRequest(searchRequest, request, requestContentParser, clusterSupportsFeature, setSize, null); } /** @@ -170,8 +153,7 @@ public static void parseSearchRequest( * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter, will be null when there is no request body to parse - * @param namedWriteableRegistry the registry of named writeables - @param clusterSupportsFeature used to check if certain features are available in this cluster + * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code udpate_by_query} and regular search differ here. 
* @param searchUsageHolder the holder of search usage stats */ @@ -179,7 +161,6 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, @Nullable XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize, @Nullable SearchUsageHolder searchUsageHolder diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 5f24f72d5cc8f..a45730a82dbc2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -97,7 +97,7 @@ public void testFailWithUnknownKey() { ).build(); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("key [unknown_key] is not supported in the metadata section", ex.getMessage()); } @@ -113,7 +113,6 @@ public void testSimpleAddWithCarriageReturn() throws Exception { ).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -137,7 +136,6 @@ public void testDefaultIndicesOptions() throws IOException { ).withParams(Collections.singletonMap("ignore_unavailable", "true")).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -250,7 +248,7 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); IllegalArgumentException expectThrows = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("The msearch request must be terminated by a newline [\n]", expectThrows.getMessage()); @@ -261,7 +259,6 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); MultiSearchRequest msearchRequest = RestMultiSearchAction.parseRequest( restRequestWithNewLine, - null, true, new UsageService().getSearchUsageHolder(), nf -> false diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java index f2a11336c7f4b..15e1d479ddf9a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; @@ -30,16 +29,9 @@ public final class RestMultiSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, 
RestApiVersion.V_7)); - private RestMultiSearchAction action; - @Before public void setUpAction() { - action = new RestMultiSearchAction( - Settings.EMPTY, - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestMultiSearchAction action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 5f641ef8fd84f..77cc94c44e151 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -37,7 +36,7 @@ public final class RestSearchActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class), nf -> false); + action = new RestSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class)); diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java index c551312f68c0b..b719d4ca3bf82 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java @@ -57,7 +57,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), + new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestGetAsyncSearchAction(), new RestGetAsyncStatusAction(), new RestDeleteAsyncSearchAction() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java index d98677d456b90..bd09d8f7740a1 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.search; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; 
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -37,16 +36,10 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler { static final Set RESPONSE_PARAMS = Collections.singleton(TYPED_KEYS_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSubmitAsyncSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -69,15 +62,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli // them as supported. We rely on SubmitAsyncSearchRequest#validate to fail in case they are set. // Note that ccs_minimize_roundtrips is also set this way, which is a supported option. request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - submit.getSearchRequest(), - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(submit.getSearchRequest(), request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); if (request.hasParam("wait_for_completion_timeout")) { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java index fe6ed8b57d1e0..cc1d4f4e6270d 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java @@ -8,7 +8,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; @@ -27,19 +26,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.mock; public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { - private RestSubmitAsyncSearchAction action; - @Before public void setUpAction() { - action = new RestSubmitAsyncSearchAction( - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestSubmitAsyncSearchAction action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index c2e4e2aa2ca98..b16bea7c65b5b 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -367,7 +367,7 @@ public List getRestHandlers( ) { return List.of( new RestGetGlobalCheckpointsAction(), - new RestFleetSearchAction(restController.getSearchUsageHolder(), 
namedWriteableRegistry, clusterSupportsFeature), + new RestFleetSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), new RestGetSecretsAction(), new RestPostSecretsAction(), diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index 28cc7c5172631..530b64729a5d1 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -77,7 +77,6 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { final MultiSearchRequest multiSearchRequest = RestMultiSearchAction.parseRequest( request, - namedWriteableRegistry, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index e1281f4f20a4c..a6c369734f0e3 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -39,16 +38,10 @@ public class RestFleetSearchAction extends BaseRestHandler { private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestFleetSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestFleetSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -79,15 +72,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull(parser -> { - RestSearchAction.parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ); + RestSearchAction.parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder); String[] stringWaitForCheckpoints = request.paramAsStringArray("wait_for_checkpoints", Strings.EMPTY_ARRAY); final long[] waitForCheckpoints = new long[stringWaitForCheckpoints.length]; for (int i = 0; i < stringWaitForCheckpoints.length; ++i) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java 
index 1748c1be86b78..665548c432ca0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -97,7 +97,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestRollupSearchAction(namedWriteableRegistry, clusterSupportsFeature), + new RestRollupSearchAction(clusterSupportsFeature), new RestPutRollupJobAction(), new RestStartRollupJobAction(), new RestStopRollupJobAction(), diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index 2e02f1d12fb69..a2e795d07aaf2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,11 +27,9 @@ public class RestRollupSearchAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestRollupSearchAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { - this.namedWriteableRegistry = namedWriteableRegistry; + public RestRollupSearchAction(Predicate clusterSupportsFeature) { this.clusterSupportsFeature = clusterSupportsFeature; } @@ -54,7 +51,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient searchRequest, restRequest, parser, - namedWriteableRegistry, clusterSupportsFeature, size -> searchRequest.source().size(size) ) From 18be49b327581fa2be70ac792ad441da332741c1 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 5 Apr 2024 11:56:28 +0200 Subject: [PATCH 137/264] [Profiling] Use CancellableTask internally (#107139) With this commit we eagerly cast the task provided to our central transport action to a CancellableTask so we can simplify cancellation checks while the action is being executed. 
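As a rough illustration of the pattern, here is a self-contained sketch with hypothetical minimal types (not the actual Task API): the entry point casts once, and every downstream helper can then rely on the cancellable type directly.

// Hypothetical stand-ins for the real Task/CancellableTask hierarchy, just to
// show the eager-cast idea in isolation.
interface Task {}

interface CancellableTask extends Task {
    boolean isCancelled();
}

class TransportActionSketch {

    void doExecute(Task task) {
        // The action is only ever invoked with cancellable tasks, so cast
        // eagerly and fail fast (under assertions) if that ever changes.
        assert task instanceof CancellableTask;
        final CancellableTask cancellable = (CancellableTask) task;
        nextStep(cancellable);
    }

    private void nextStep(CancellableTask task) {
        // Before: if (task instanceof CancellableTask c && c.isCancelled()) return;
        // After: the parameter type already guarantees cancellability.
        if (task.isCancelled()) {
            return;
        }
        // ... continue with the action ...
    }
}

Centralizing the cast keeps the unchecked assumption in one place instead of repeating instanceof checks in every helper.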
Relates #107037 --- .../TransportGetStackTracesAction.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 0acdc7c37ce09..d7c9e61b73a3a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -149,8 +149,10 @@ public TransportGetStackTracesAction( } @Override - protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionListener submitListener) { + protected void doExecute(Task task, GetStackTracesRequest request, ActionListener submitListener) { licenseChecker.requireSupportedLicense(); + assert task instanceof CancellableTask; + final CancellableTask submitTask = (CancellableTask) task; GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(request); Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), submitTask); if (request.isUserProvidedIndices()) { @@ -161,7 +163,7 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL } private void searchProfilingEvents( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -201,7 +203,7 @@ private void searchProfilingEvents( } private void searchGenericEvents( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -240,7 +242,7 @@ private void searchGenericEvents( } private void searchGenericEventGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -320,7 +322,7 @@ private void searchGenericEventGroupedByStackTrace( } private void searchEventGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -432,7 +434,7 @@ The same stacktraces may come from different hosts (eventually from different da } private ActionListener handleEventsGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener, @@ -471,12 +473,12 @@ private static long getAggValueAsLong(SearchResponse searchResponse, String fiel } private void retrieveStackTraces( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener ) { - if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { + if (submitTask.notifyIfCancelled(submitListener)) { return; } List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet()); @@ -554,7 +556,7 @@ static List> sliced(List c, int slices) { private class StackTraceHandler { private final AtomicInteger expectedResponses; - private final Task submitTask; + private final CancellableTask submitTask; private final ClusterState clusterState; private final Client client; private final GetStackTracesResponseBuilder responseBuilder; @@ -568,7 +570,7 @@ private class StackTraceHandler { private final Map hostMetadata; private StackTraceHandler( - Task 
submitTask, + CancellableTask submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -691,7 +693,7 @@ public void mayFinish() { } private void retrieveStackTraceDetails( - Task submitTask, + CancellableTask submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -699,7 +701,7 @@ private void retrieveStackTraceDetails( List executableIds, ActionListener submitListener ) { - if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { + if (submitTask.notifyIfCancelled(submitListener)) { return; } List stackFrameIndices = resolver.resolve( From ad77d32271dc77860b84ed38507b6903dde24f56 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 5 Apr 2024 12:00:52 +0200 Subject: [PATCH 138/264] ESQL: Fix version test failure on non-SNAPSHOT builds (#107138) Test the snapshot ESQL version separately and take the current build into account. --- .../esql/action/EsqlQueryRequestTests.java | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 44066ff3d091d..6ec1af033f86c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -158,8 +159,13 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testKnownVersionIsValid() throws IOException { + public void testKnownStableVersionIsValid() throws IOException { for (EsqlVersion version : EsqlVersion.values()) { + if (version == EsqlVersion.SNAPSHOT) { + // Not stable, skip. Also avoids breaking the CI as this is invalid for non-SNAPSHOT builds. + continue; + } + String validVersionString = randomBoolean() ? 
version.versionStringWithoutEmoji() : version.toString(); String json = String.format(Locale.ROOT, """ @@ -209,23 +215,27 @@ public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { "query": "ROW x = 1" } """, esqlVersion); - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + + String errorOnNonSnapshotBuilds = "[version] with value [" + + esqlVersion + + "] only allowed in snapshot builds, latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]"; + + if (Build.current().isSnapshot()) { + assertNull(request.validate()); + } else { + assertNotNull(request.validate()); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); + } + request.onSnapshotBuild(true); assertNull(request.validate()); request.onSnapshotBuild(false); assertNotNull(request.validate()); - assertThat( - request.validate().getMessage(), - containsString( - "[version] with value [" - + esqlVersion - + "] only allowed in snapshot builds, latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]" - ) - ); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") From d9f010abac7ea3aebdbcfc8bf8f68f8fd9ed309c Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Apr 2024 12:48:23 +0200 Subject: [PATCH 139/264] Fix leak in RestVectorTileAction (#107143) Fix obvious SearchResponse leak in RestVectorTileAction. Test only issue fortunately since the response hits are empty. --- .../xpack/vectortile/rest/RestVectorTileAction.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index ba5b97bbcb062..66fbf2e892b56 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -162,10 +162,14 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio searchResponse.getShardFailures(), searchResponse.getClusters() ); - tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory)); - ensureOpen(); - tileBuilder.build().writeTo(bytesOut); - return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes()); + try { + tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory)); + ensureOpen(); + tileBuilder.build().writeTo(bytesOut); + return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes()); + } finally { + meta.decRef(); + } } } }); From db802bb6cb517db46df8501ca10d1a4c9a962284 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 5 Apr 2024 13:00:52 +0200 Subject: [PATCH 140/264] Move back InferenceServiceRegistry and ModelRegistry to inference plugin, reverting #105012 (#107141) --- .../inference/InferenceServiceRegistry.java | 62 +++++++----- .../InferenceServiceRegistryImpl.java | 64 ------------ .../inference/ModelRegistry.java | 99 ------------------- .../elasticsearch/node/NodeConstruction.java | 15 --- .../plugins/InferenceRegistryPlugin.java | 22 ----- ...gistryImplIT.java => ModelRegistryIT.java} | 52 +++++----- 
.../xpack/inference/InferencePlugin.java | 31 ++---- .../TransportDeleteInferenceModelAction.java | 2 +- .../TransportGetInferenceModelAction.java | 2 +- .../action/TransportInferenceAction.java | 2 +- .../TransportPutInferenceModelAction.java | 2 +- ...elRegistryImpl.java => ModelRegistry.java} | 82 +++++++++------ ...ImplTests.java => ModelRegistryTests.java} | 34 +++---- 13 files changed, 143 insertions(+), 326 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java delete mode 100644 server/src/main/java/org/elasticsearch/inference/ModelRegistry.java delete mode 100644 server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java rename x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/{ModelRegistryImplIT.java => ModelRegistryIT.java} (86%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/{ModelRegistryImpl.java => ModelRegistry.java} (86%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/{ModelRegistryImplTests.java => ModelRegistryTests.java} (92%) diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java index ce6f1b21b734c..d5973807d9d78 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java @@ -13,41 +13,49 @@ import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class InferenceServiceRegistry implements Closeable { + + private final Map services; + private final List namedWriteables = new ArrayList<>(); + + public InferenceServiceRegistry( + List inferenceServicePlugins, + InferenceServiceExtension.InferenceServiceFactoryContext factoryContext + ) { + // TODO check names are unique + services = inferenceServicePlugins.stream() + .flatMap(r -> r.getInferenceServiceFactories().stream()) + .map(factory -> factory.create(factoryContext)) + .collect(Collectors.toMap(InferenceService::name, Function.identity())); + } -public interface InferenceServiceRegistry extends Closeable { - void init(Client client); - - Map getServices(); - - Optional getService(String serviceName); - - List getNamedWriteables(); - - class NoopInferenceServiceRegistry implements InferenceServiceRegistry { - public NoopInferenceServiceRegistry() {} + public void init(Client client) { + services.values().forEach(s -> s.init(client)); + } - @Override - public void init(Client client) {} + public Map getServices() { + return services; + } - @Override - public Map getServices() { - return Map.of(); - } + public Optional getService(String serviceName) { + return Optional.ofNullable(services.get(serviceName)); + } - @Override - public Optional getService(String serviceName) { - return Optional.empty(); - } + public List getNamedWriteables() { + return namedWriteables; + } - @Override - public List getNamedWriteables() { - return List.of(); + @Override + public void close() throws IOException { + for (var service : services.values()) { + service.close(); } - - @Override - public void close() throws IOException {} } } diff --git 
a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java deleted file mode 100644 index f0a990ded98ce..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class InferenceServiceRegistryImpl implements InferenceServiceRegistry { - - private final Map services; - private final List namedWriteables = new ArrayList<>(); - - public InferenceServiceRegistryImpl( - List inferenceServicePlugins, - InferenceServiceExtension.InferenceServiceFactoryContext factoryContext - ) { - // TODO check names are unique - services = inferenceServicePlugins.stream() - .flatMap(r -> r.getInferenceServiceFactories().stream()) - .map(factory -> factory.create(factoryContext)) - .collect(Collectors.toMap(InferenceService::name, Function.identity())); - } - - @Override - public void init(Client client) { - services.values().forEach(s -> s.init(client)); - } - - @Override - public Map getServices() { - return services; - } - - @Override - public Optional getService(String serviceName) { - return Optional.ofNullable(services.get(serviceName)); - } - - @Override - public List getNamedWriteables() { - return namedWriteables; - } - - @Override - public void close() throws IOException { - for (var service : services.values()) { - service.close(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java b/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java deleted file mode 100644 index fa90d5ba6f756..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.action.ActionListener; - -import java.util.List; -import java.util.Map; - -public interface ModelRegistry { - - /** - * Get a model. - * Secret settings are not included - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModel(String inferenceEntityId, ActionListener listener); - - /** - * Get a model with its secret settings - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModelWithSecrets(String inferenceEntityId, ActionListener listener); - - /** - * Get all models of a particular task type. 
- * Secret settings are not included - * @param taskType The task type - * @param listener Models listener - */ - void getModelsByTaskType(TaskType taskType, ActionListener> listener); - - /** - * Get all models. - * Secret settings are not included - * @param listener Models listener - */ - void getAllModels(ActionListener> listener); - - void storeModel(Model model, ActionListener listener); - - void deleteModel(String modelId, ActionListener listener); - - /** - * Semi parsed model where inference entity id, task type and service - * are known but the settings are not parsed. - */ - record UnparsedModel( - String inferenceEntityId, - TaskType taskType, - String service, - Map settings, - Map secrets - ) {} - - class NoopModelRegistry implements ModelRegistry { - @Override - public void getModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void getAllModels(ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void storeModel(Model model, ActionListener listener) { - fail(listener); - } - - @Override - public void deleteModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { - fail(listener); - } - - private static void fail(ActionListener listener) { - listener.onFailure(new IllegalArgumentException("No model registry configured")); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 15ebe2752451d..5bf19c4b87157 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -127,8 +127,6 @@ import org.elasticsearch.indices.recovery.plan.PeerOnlyRecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; @@ -147,7 +145,6 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.HealthPlugin; -import org.elasticsearch.plugins.InferenceRegistryPlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MetadataUpgrader; @@ -1114,18 +1111,6 @@ record PluginServiceInstances( ); } - // Register noop versions of inference services if Inference plugin is not available - Optional inferenceRegistryPlugin = getSinglePlugin(InferenceRegistryPlugin.class); - modules.bindToInstance( - InferenceServiceRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getInferenceServiceRegistry) - .orElse(new InferenceServiceRegistry.NoopInferenceServiceRegistry()) - ); - modules.bindToInstance( - ModelRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getModelRegistry).orElse(new ModelRegistry.NoopModelRegistry()) - ); - injector = modules.createInjector(); postInjection(clusterModule, actionModule, clusterService, transportService, featureService); 
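For reference, the name-keyed lookup that the reinstated InferenceServiceRegistry (see the diff above) performs can be sketched in isolation as follows. This is a hedged, self-contained sketch with a hypothetical Service type, assuming unique service names as the TODO in the real constructor notes.

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;

// Hypothetical minimal service type; the real registry indexes
// InferenceService instances created from plugin-provided factories.
interface Service {
    String name();
}

class RegistrySketch {

    private final Map<String, Service> services;

    RegistrySketch(List<Service> created) {
        // Collectors.toMap throws on duplicate keys, so service names are
        // assumed unique here.
        this.services = created.stream().collect(Collectors.toMap(Service::name, Function.identity()));
    }

    Optional<Service> get(String name) {
        return Optional.ofNullable(services.get(name));
    }
}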
diff --git a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java deleted file mode 100644 index 696c3a067dad1..0000000000000 --- a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.plugins; - -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; - -/** - * Plugins that provide inference services should implement this interface. - * There should be a single one in the classpath, as we currently support a single instance for ModelRegistry / InfereceServiceRegistry. - */ -public interface InferenceRegistryPlugin { - InferenceServiceRegistry getInferenceServiceRegistry(); - - ModelRegistry getModelRegistry(); -} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java similarity index 86% rename from x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java rename to x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index ccda986a8d280..0f23e0b33d774 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.InferencePlugin; -import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elser.ElserInternalModel; import org.elasticsearch.xpack.inference.services.elser.ElserInternalService; import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettingsTests; @@ -55,13 +55,13 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; -public class ModelRegistryImplIT extends ESSingleNodeTestCase { +public class ModelRegistryIT extends ESSingleNodeTestCase { - private ModelRegistryImpl ModelRegistryImpl; + private ModelRegistry modelRegistry; @Before public void createComponents() { - ModelRegistryImpl = new ModelRegistryImpl(client()); + modelRegistry = new ModelRegistry(client()); } @Override @@ -75,7 +75,7 @@ public void testStoreModel() throws Exception { AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); assertThat(storeModelHolder.get(), is(true)); 
assertThat(exceptionHolder.get(), is(nullValue())); @@ -87,7 +87,7 @@ public void testStoreModelWithUnknownFields() throws Exception { AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); assertNull(storeModelHolder.get()); assertNotNull(exceptionHolder.get()); @@ -106,12 +106,12 @@ public void testGetModel() throws Exception { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); // now get the model - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); @@ -133,13 +133,13 @@ public void testStoreModelFailsWhenModelExists() throws Exception { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertThat(exceptionHolder.get(), is(nullValue())); putModelHolder.set(false); // an model with the same id exists - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(false)); assertThat(exceptionHolder.get(), not(nullValue())); assertThat( @@ -154,20 +154,20 @@ public void testDeleteModel() throws Exception { Model model = buildElserModelConfig(id, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); } AtomicReference deleteResponseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertTrue(deleteResponseHolder.get()); // get should fail deleteResponseHolder.set(false); - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets("model1", listener), modelHolder, 
exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), not(nullValue())); assertFalse(deleteResponseHolder.get()); @@ -187,13 +187,13 @@ public void testGetModelsByTaskType() throws InterruptedException { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); } AtomicReference exceptionHolder = new AtomicReference<>(); - AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(3)); var sparseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.SPARSE_EMBEDDING) @@ -204,7 +204,7 @@ public void testGetModelsByTaskType() throws InterruptedException { assertThat(m.secrets().keySet(), empty()); }); - blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(2)); var denseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.TEXT_EMBEDDING) @@ -228,13 +228,13 @@ public void testGetAllModels() throws InterruptedException { var model = createModel(randomAlphaOfLength(5), randomFrom(TaskType.values()), service); createdModels.add(model); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); } - AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getAllModels(listener), modelHolder, exceptionHolder); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(modelCount)); var getAllModels = modelHolder.get(); @@ -258,18 +258,18 @@ public void testGetModelWithSecrets() throws InterruptedException { AtomicReference exceptionHolder = new AtomicReference<>(); var modelWithSecrets = createModelWithSecrets(inferenceEntityId, randomFrom(TaskType.values()), service, secret); - blockingCall(listener -> ModelRegistryImpl.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), 
modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), hasSize(1)); var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings"); assertThat(secretSettings.get("secret"), equalTo(secret)); // get model without secrets - blockingCall(listener -> ModelRegistryImpl.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), empty()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index c598a58d014f9..c707f99e7eb65 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -24,11 +24,8 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.InferenceServiceRegistryImpl; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; -import org.elasticsearch.plugins.InferenceRegistryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; @@ -53,7 +50,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestInferenceAction; @@ -74,7 +71,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, InferenceRegistryPlugin { +public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin { /** * When this setting is true the verification check that @@ -99,8 +96,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); - private final SetOnce modelRegistry = new SetOnce<>(); - private List inferenceServiceExtensions; public InferencePlugin(Settings settings) { @@ -151,7 +146,7 @@ public Collection createComponents(PluginServices services) { ); httpFactory.set(httpRequestSenderFactory); - ModelRegistry modelReg = new ModelRegistryImpl(services.client()); + ModelRegistry modelRegistry = new ModelRegistry(services.client()); if (inferenceServiceExtensions == null) { inferenceServiceExtensions = new ArrayList<>(); @@ -162,13 +157,11 @@ public Collection 
createComponents(PluginServices services) { var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext(services.client()); // This must be done after the HttpRequestSenderFactory is created so that the services can get the // reference correctly - var inferenceRegistry = new InferenceServiceRegistryImpl(inferenceServices, factoryContext); - inferenceRegistry.init(services.client()); - inferenceServiceRegistry.set(inferenceRegistry); - modelRegistry.set(modelReg); + var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); + registry.init(services.client()); + inferenceServiceRegistry.set(registry); - // Don't return components as they will be registered using InferenceRegistryPlugin methods to retrieve them - return List.of(); + return List.of(modelRegistry, registry); } @Override @@ -266,14 +259,4 @@ public void close() { IOUtils.closeWhileHandlingException(inferenceServiceRegistry.get(), throttlerToClose); } - - @Override - public InferenceServiceRegistry getInferenceServiceRegistry() { - return inferenceServiceRegistry.get(); - } - - @Override - public ModelRegistry getModelRegistry() { - return modelRegistry.get(); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java index ad6042581f264..b55e2e6f8ebed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -23,12 +23,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 0f7e48c4f8140..2de1aecea118c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -25,6 +24,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.inference.InferencePlugin; +import 
org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index a480763f33c47..edaf42d7f1fc6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -16,11 +16,11 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportInferenceAction extends HandledTransportAction { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 556acfd89c9c6..85e8481f749d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -44,6 +43,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.io.IOException; import java.util.Map; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java similarity index 86% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 40921cd38f181..0f3aa5b82b189 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -32,7 +31,6 @@ import 
org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -57,21 +55,49 @@ import static org.elasticsearch.core.Strings.format; -public class ModelRegistryImpl implements ModelRegistry { +public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} + /** + * Semi parsed model where inference entity id, task type and service + * are known but the settings are not parsed. + */ + public record UnparsedModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map settings, + Map secrets + ) { + + public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) { + if (modelConfigMap.config() == null) { + throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); + } + String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + TaskType taskType = TaskType.fromString(taskTypeStr); + + return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); + } + } + private static final String TASK_TYPE_FIELD = "task_type"; private static final String MODEL_ID_FIELD = "model_id"; - private static final Logger logger = LogManager.getLogger(ModelRegistryImpl.class); + private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; - @Inject - public ModelRegistryImpl(Client client) { + public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); } - @Override + /** + * Get a model with its secret settings + * @param inferenceEntityId Model to get + * @param listener Model listener + */ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets @@ -80,7 +106,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets @@ -101,7 +132,7 @@ public void getModel(String inferenceEntityId, ActionListener lis return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); assert modelConfigs.size() == 1; delegate.onResponse(modelConfigs.get(0)); }); @@ -116,7 +147,12 @@ public void getModel(String inferenceEntityId, ActionListener lis client.search(modelSearch, searchListener); } - @Override + /** + * Get all models of a particular task type. 
+ * Secret settings are not included + * @param taskType The task type + * @param listener Models listener + */ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // Not an error if no models of this task_type @@ -125,7 +161,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // Not an error if no models of this task_type @@ -150,7 +190,7 @@ public void getAllModels(ActionListener> listener) { return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); delegate.onResponse(modelConfigs); }); @@ -217,7 +257,6 @@ private ModelConfigMap createModelConfigMap(SearchHits hits, String inferenceEnt ); } - @Override public void storeModel(Model model, ActionListener listener) { ActionListener bulkResponseActionListener = getStoreModelListener(model, listener); @@ -314,7 +353,6 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes return null; } - @Override public void deleteModel(String inferenceEntityId, ActionListener listener) { DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); @@ -339,16 +377,4 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T private QueryBuilder documentIdQuery(String inferenceEntityId) { return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } - - private static UnparsedModel unparsedModelFromMap(ModelRegistryImpl.ModelConfigMap modelConfigMap) { - if (modelConfigMap.config() == null) { - throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); - } - String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); - String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); - String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); - TaskType taskType = TaskType.fromString(taskTypeStr); - - return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java similarity index 92% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 10fd4f09e86ac..768f053295d13 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -46,7 +46,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class ModelRegistryImplTests extends ESTestCase { +public class 
ModelRegistryTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); @@ -66,9 +66,9 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() var client = mockClient(); mockClientExecuteSearch(client, mockSearchResponse(SearchHits.EMPTY)); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT)); @@ -80,9 +80,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn var unknownIndexHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", "unknown_index")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT)); @@ -97,9 +97,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -114,9 +114,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -148,9 +148,9 @@ public void testGetModelWithSecrets() { mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); var modelConfig = listener.actionGet(TIMEOUT); @@ -177,9 +177,9 @@ public void testGetModelNoSecrets() { mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModel("1", listener); registry.getModel("1", listener); @@ -202,7 +202,7 @@ public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new 
ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -219,7 +219,7 @@ public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -250,7 +250,7 @@ public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVe mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -276,7 +276,7 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); From 173900f89e0e4eabbacb29b76985d82b84af0708 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 12:10:03 +0100 Subject: [PATCH 141/264] [ML] Update last usages of model_id to inference_id (#107133) --- docs/reference/inference/put-inference.asciidoc | 2 +- .../xpack/core/inference/action/PutInferenceModelAction.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 87a865b9487e5..6df1993175a0d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -18,7 +18,7 @@ or if you want to use non-NLP models, use the <>. 
[[put-inference-api-request]] ==== {api-request-title} -`PUT /_inference/<task_type>/<model_id>` +`PUT /_inference/<task_type>/<inference_id>` [discrete] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index 5cc1c98c6d89b..4617d1f6bccaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { public ActionRequestValidationException validate() { ActionRequestValidationException validationException = new ActionRequestValidationException(); if (MlStrings.isValidId(this.inferenceEntityId) == false) { - validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.inferenceEntityId)); + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "inference_id", this.inferenceEntityId)); } if (validationException.validationErrors().isEmpty() == false) { From 040eb1e5d68e35a9c58c237329fcfb406b177438 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 12:56:55 +0100 Subject: [PATCH 142/264] [Ml] Mute frequent items failure (#107151) --- .../resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml index 4a88762ddb9ea..53f07b35482ed 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml @@ -433,6 +433,9 @@ setup: --- "Test frequent item sets unsupported types": - do: + skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/ search: index: store From 8716188b15189e60b2b115ba67177a5ce0b9bffa Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 13:36:08 +0100 Subject: [PATCH 143/264] Mute Freq Items Test frequent item sets unsupported types (#107153) For https://github.com/elastic/elasticsearch/issues/106215, fixing what was not done properly in #107151 --- .../rest-api-spec/test/ml/frequent_item_sets_agg.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml index 53f07b35482ed..db41e0d0efaa1 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml @@ -432,10 +432,11 @@ setup: --- "Test frequent item sets unsupported types": + + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" - do: - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/ search: index: store From
ee667c40d7a096a99d0c677d9b89e494f76f1158 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 5 Apr 2024 14:36:37 +0200 Subject: [PATCH 144/264] [Transform] Extract common test code to TransformCommonRestTestCase class (#107103) --- .../plugin/transform/qa/common/build.gradle | 8 ++ .../common/TransformCommonRestTestCase.java | 127 ++++++++++++++++++ .../qa/multi-node-tests/build.gradle | 1 + .../xpack/transform/integration/LatestIT.java | 2 +- .../transform/integration/TransformIT.java | 70 +--------- .../integration/TransformRestTestCase.java | 58 +------- .../TransformUsingSearchRuntimeFieldsIT.java | 4 +- .../continuous/TransformContinuousIT.java | 2 +- .../qa/single-node-tests/build.gradle | 1 + .../integration/TransformRestTestCase.java | 86 +----------- .../integration/TransformRobustnessIT.java | 6 + 11 files changed, 157 insertions(+), 208 deletions(-) create mode 100644 x-pack/plugin/transform/qa/common/build.gradle create mode 100644 x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java diff --git a/x-pack/plugin/transform/qa/common/build.gradle b/x-pack/plugin/transform/qa/common/build.gradle new file mode 100644 index 0000000000000..9e7abfa2f977e --- /dev/null +++ b/x-pack/plugin/transform/qa/common/build.gradle @@ -0,0 +1,8 @@ +apply plugin: 'elasticsearch.internal-java-rest-test' + +dependencies { + api project(':libs:elasticsearch-x-content') + api project(':test:framework') + api project(xpackModule('core')) +} + diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java new file mode 100644 index 0000000000000..486dd7c581032 --- /dev/null +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.integration.common; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +public abstract class TransformCommonRestTestCase extends ESRestTestCase { + + protected static final String TRANSFORM_ENDPOINT = TransformField.REST_BASE_PATH_TRANSFORMS; + protected static final String AUTH_KEY = "Authorization"; + protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; + + protected static String getTransformEndpoint() { + return TRANSFORM_ENDPOINT; + } + + /** + * Returns the list of transform tasks as reported by the _tasks API. 
+ */ + @SuppressWarnings("unchecked") + protected List getTransformTasks() throws IOException { + Request tasksRequest = new Request("GET", "/_tasks"); + tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); + Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); + + Map nodes = (Map) tasksResponse.get("nodes"); + if (nodes == null) { + return List.of(); + } + + List foundTasks = new ArrayList<>(); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map tasks = (Map) nodeInfo.get("tasks"); + if (tasks != null) { + foundTasks.addAll(tasks.keySet()); + } + } + return foundTasks; + } + + /** + * Returns the list of transform tasks for the given transform as reported by the _cluster/state API. + */ + @SuppressWarnings("unchecked") + protected List getTransformTasksFromClusterState(String transformId) throws IOException { + Request request = new Request("GET", "_cluster/state"); + Map response = entityAsMap(adminClient().performRequest(request)); + + List> tasks = (List>) XContentMapValues.extractValue( + response, + "metadata", + "persistent_tasks", + "tasks" + ); + + return tasks.stream().map(t -> (String) t.get("id")).filter(transformId::equals).toList(); + } + + @SuppressWarnings("unchecked") + protected void logAudits() throws Exception { + logger.info("writing audit messages to the log"); + Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); + searchRequest.setJsonEntity(""" + { + "size": 100, + "sort": [ { "timestamp": { "order": "asc" } } ] + }"""); + + assertBusy(() -> { + try { + refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); + Response searchResponse = client().performRequest(searchRequest); + + Map searchResult = entityAsMap(searchResponse); + List> searchHits = (List>) XContentMapValues.extractValue( + "hits.hits", + searchResult + ); + + for (Map hit : searchHits) { + Map source = (Map) XContentMapValues.extractValue("_source", hit); + String level = (String) source.getOrDefault("level", "info"); + logger.log( + Level.getLevel(level.toUpperCase(Locale.ROOT)), + "Transform audit: [{}] [{}] [{}] [{}]", + Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), + source.getOrDefault("transform_id", "n/a"), + source.getOrDefault("message", "n/a"), + source.getOrDefault("node_name", "n/a") + ); + } + } catch (ResponseException e) { + // see gh#54810, wrap temporary 503's as assertion error for retry + if (e.getResponse().getStatusLine().getStatusCode() != 503) { + throw e; + } + throw new AssertionError("Failed to retrieve audit logs", e); + } + }, 5, TimeUnit.SECONDS); + } + + protected void refreshIndex(String index) throws IOException { + Request refreshRequest = new Request("POST", index + "/_refresh"); + assertOK(adminClient().performRequest(refreshRequest)); + } +} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle index aab0d16e54f5c..32bb44850df6b 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle @@ -3,6 +3,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) + javaRestTestImplementation project(path: xpackModule('transform:qa:common')) } // location for keys and certificates diff --git 
a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java index 07b6bc9bd7770..27695eb080b0a 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java @@ -126,7 +126,7 @@ public void testLatest() throws Exception { waitUntilCheckpoint(transformConfig.getId(), 1L); stopTransform(transformConfig.getId()); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); var mappings = getIndexMapping(destIndexName, RequestOptions.DEFAULT); assertThat( (Map) XContentMapValues.extractValue(destIndexName + ".mappings", mappings), diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 538479c33b084..e7d54028caa20 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; -import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SyncConfig; @@ -37,9 +36,7 @@ import java.io.IOException; import java.time.Instant; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -247,23 +244,23 @@ public void testTransformLifecycleInALoop() throws Exception { // Create the continuous transform putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); - assertThatTransformTaskDoesNotExist(transformId); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId, RequestOptions.DEFAULT); // There is 1 transform task after start assertThat(getTransformTasks(), hasSize(1)); - assertThatTransformTaskExists(transformId); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); Thread.sleep(sleepAfterStartMillis); // There should still be 1 transform task as the transform is continuous assertThat(getTransformTasks(), hasSize(1)); - assertThatTransformTaskExists(transformId); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // Stop the transform with force set randomly stopTransform(transformId, true, null, false, force); // After the transform is stopped, there should be no transform task left assertThat(getTransformTasks(), is(empty())); - assertThatTransformTaskDoesNotExist(transformId); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the 
transform deleteTransform(transformId); @@ -303,63 +300,6 @@ private String createConfig(String transformId, String sourceIndex, String destI return Strings.toString(config); } - /** - * Returns the list of transform tasks as reported by _tasks API. - */ - @SuppressWarnings("unchecked") - protected List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - final Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Map.Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } - - /** - * Verifies that the given transform task exists in cluster state. - */ - private void assertThatTransformTaskExists(String transformId) throws IOException { - assertThatTransformTaskCountIsEqualTo(transformId, 1); - } - - /** - * Verifies that the given transform task does not exist in cluster state. - */ - private void assertThatTransformTaskDoesNotExist(String transformId) throws IOException { - assertThatTransformTaskCountIsEqualTo(transformId, 0); - } - - /** - * Verifies that the number of transform tasks in cluster state for the given transform is as expected. - */ - @SuppressWarnings("unchecked") - private void assertThatTransformTaskCountIsEqualTo(String transformId, int expectedCount) throws IOException { - Request request = new Request("GET", "_cluster/state"); - Map response = entityAsMap(adminClient().performRequest(request)); - - List> tasks = (List>) XContentMapValues.extractValue( - response, - "metadata", - "persistent_tasks", - "tasks" - ); - - assertThat("Tasks were: " + tasks, tasks.stream().filter(t -> transformId.equals(t.get("id"))).toList(), hasSize(expectedCount)); - } - public void testContinuousTransformUpdate() throws Exception { String indexName = "continuous-reviews-update"; createReviewsIndex(indexName, 10, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow); @@ -447,7 +387,7 @@ public void testContinuousTransformUpdate() throws Exception { assertOK(searchResponse); var responseMap = entityAsMap(searchResponse); assertThat((Integer) XContentMapValues.extractValue("hits.total.value", responseMap), greaterThan(0)); - refreshIndex(dest, RequestOptions.DEFAULT); + refreshIndex(dest); }, 30, TimeUnit.SECONDS); stopTransform(config.getId()); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 6e13e936f5532..eb1a1258d5a96 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -10,7 +10,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.logging.log4j.Level; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; 
import org.elasticsearch.client.Response; @@ -27,7 +26,6 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -40,22 +38,20 @@ import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; +import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.time.Instant; import java.time.ZoneId; import java.util.Base64; import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -67,9 +63,8 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; -public abstract class TransformRestTestCase extends ESRestTestCase { +public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static String TRANSFORM_ENDPOINT = "/_transform/"; protected static final String AUTH_KEY = "Authorization"; protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; @@ -81,49 +76,6 @@ protected void cleanUp() throws Exception { waitForPendingTasks(); } - @SuppressWarnings("unchecked") - private void logAudits() throws Exception { - logger.info("writing audit messages to the log"); - Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); - searchRequest.setJsonEntity(""" - { - "size": 100, - "sort": [ { "timestamp": { "order": "asc" } } ] - }"""); - - assertBusy(() -> { - try { - refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, RequestOptions.DEFAULT); - Response searchResponse = client().performRequest(searchRequest); - - Map searchResult = entityAsMap(searchResponse); - List> searchHits = (List>) XContentMapValues.extractValue( - "hits.hits", - searchResult - ); - - for (Map hit : searchHits) { - Map source = (Map) XContentMapValues.extractValue("_source", hit); - String level = (String) source.getOrDefault("level", "info"); - logger.log( - Level.getLevel(level.toUpperCase(Locale.ROOT)), - "Transform audit: [{}] [{}] [{}] [{}]", - Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), - source.getOrDefault("transform_id", "n/a"), - source.getOrDefault("message", "n/a"), - source.getOrDefault("node_name", "n/a") - ); - } - } catch (ResponseException e) { - // see gh#54810, wrap temporary 503's as assertion error for retry - if (e.getResponse().getStatusLine().getStatusCode() != 503) { - throw e; - } - throw 
new AssertionError("Failed to retrieve audit logs", e); - } - }, 5, TimeUnit.SECONDS); - } - protected void cleanUpTransforms() throws IOException { for (String id : createdTransformIds) { try { @@ -140,12 +92,6 @@ protected void cleanUpTransforms() throws IOException { createdTransformIds.clear(); } - protected void refreshIndex(String index, RequestOptions options) throws IOException { - var r = new Request("POST", index + "/_refresh"); - r.setOptions(options); - assertOK(adminClient().performRequest(r)); - } - protected Map getIndexMapping(String index, RequestOptions options) throws IOException { var r = new Request("GET", "/" + index + "/_mapping"); r.setOptions(options); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java index 2e509bedbce39..d17d9dbd20ffd 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java @@ -138,7 +138,7 @@ public void testPivotTransform() throws Exception { stopTransform(config.getId()); assertBusy(() -> { assertEquals("stopped", getTransformState(config.getId())); }); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); // Verify destination index mappings var mappings = (Map) XContentMapValues.extractValue( destIndexName + ".mappings", @@ -235,7 +235,7 @@ public void testLatestTransform() throws Exception { stopTransform(configWithRuntimeFields.getId()); assertBusy(() -> { assertEquals("stopped", getTransformState(configWithRuntimeFields.getId())); }); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); // Verify destination index mappings var destIndexMapping = getIndexMapping(destIndexName, RequestOptions.DEFAULT); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java index 5eac2bd2ebdf6..69c0e12ca4b55 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java @@ -254,7 +254,7 @@ public void testContinuousEvents() throws Exception { source.append("\r\n"); doBulk(source.toString(), false); } - refreshIndex(sourceIndexName, RequestOptions.DEFAULT); + refreshIndex(sourceIndexName); // start all transforms, wait until the processed all data and stop them startTransforms(); diff --git a/x-pack/plugin/transform/qa/single-node-tests/build.gradle b/x-pack/plugin/transform/qa/single-node-tests/build.gradle index d4f84ecb37c9c..7eeb8c97d1ae4 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/single-node-tests/build.gradle @@ -4,6 +4,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { 
javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) + javaRestTestImplementation project(path: xpackModule('transform:qa:common')) } testClusters.configureEach { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 7c74e918a039f..09fbea29d4b15 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -9,33 +9,27 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.logging.log4j.Level; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.DestAlias; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; -import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; +import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase; import org.junit.After; import org.junit.AfterClass; import java.io.IOException; -import java.time.Instant; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -44,10 +38,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public abstract class TransformRestTestCase extends ESRestTestCase { +public abstract class TransformRestTestCase extends TransformCommonRestTestCase { protected static final String TEST_PASSWORD = "x-pack-test-password"; - private static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; protected static final SecureString TEST_PASSWORD_SECURE_STRING = new SecureString(TEST_PASSWORD.toCharArray()); private static final String BASIC_AUTH_VALUE_SUPER_USER = basicAuthHeaderValue("x_pack_rest_user", TEST_PASSWORD_SECURE_STRING); @@ -538,7 +531,7 @@ protected Request createRequestWithSecondaryAuth( RequestOptions.Builder options = request.getOptions().toBuilder(); if (authHeader != null) { - options.addHeader("Authorization", authHeader); + options.addHeader(AUTH_KEY, authHeader); } if (secondaryAuthHeader != null) { options.addHeader(SECONDARY_AUTH_KEY, secondaryAuthHeader); @@ -563,10 +556,6 @@ void waitForTransformCheckpoint(String transformId, long checkpoint) throws Exce }, 30, TimeUnit.SECONDS); } - void refreshIndex(String index) throws IOException { - 
assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); - } - @SuppressWarnings("unchecked") protected static List> getTransforms(List> expectedErrors) throws IOException { Request request = new Request("GET", getTransformEndpoint() + "_all"); @@ -688,73 +677,4 @@ protected void assertOneCount(String query, String field, int expected) throws I int actual = (Integer) ((List) XContentMapValues.extractValue(field, searchResult)).get(0); assertEquals(expected, actual); } - - protected static String getTransformEndpoint() { - return TransformField.REST_BASE_PATH_TRANSFORMS; - } - - @SuppressWarnings("unchecked") - private void logAudits() throws Exception { - logger.info("writing audit messages to the log"); - Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); - searchRequest.setJsonEntity(""" - { - "size": 100, - "sort": [ { "timestamp": { "order": "asc" } } ] - }"""); - - assertBusy(() -> { - try { - refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); - Response searchResponse = client().performRequest(searchRequest); - - Map searchResult = entityAsMap(searchResponse); - List> searchHits = (List>) XContentMapValues.extractValue( - "hits.hits", - searchResult - ); - - for (Map hit : searchHits) { - Map source = (Map) XContentMapValues.extractValue("_source", hit); - String level = (String) source.getOrDefault("level", "info"); - logger.log( - Level.getLevel(level.toUpperCase(Locale.ROOT)), - "Transform audit: [{}] [{}] [{}] [{}]", - Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), - source.getOrDefault("transform_id", "n/a"), - source.getOrDefault("message", "n/a"), - source.getOrDefault("node_name", "n/a") - ); - } - } catch (ResponseException e) { - // see gh#54810, wrap temporary 503's as assertion error for retry - if (e.getResponse().getStatusLine().getStatusCode() != 503) { - throw e; - } - throw new AssertionError("Failed to retrieve audit logs", e); - } - }, 5, TimeUnit.SECONDS); - } - - @SuppressWarnings("unchecked") - protected List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Map.Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index e537a6f280ac0..0f807fbae45d1 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -95,10 +95,16 @@ public void testCreateAndDeleteTransformInALoop() throws IOException { try { // Create the batch transform createPivotReviewsTransform(transformId, destIndex, null); + 
assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + // Wait until the transform finishes startAndWaitForTransform(transformId, destIndex); + // After the transform finishes, there should be no transform task left assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { From 54eeb622d523ff4c1ac901a471f86927444af2bd Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Fri, 5 Apr 2024 15:29:54 +0200 Subject: [PATCH 145/264] Add ES|QL Locate function (#106899) * Add ES|QL Locate function --- docs/changelog/106899.yaml | 6 + .../functions/description/locate.asciidoc | 5 + .../esql/functions/layout/locate.asciidoc | 14 ++ .../esql/functions/parameters/locate.asciidoc | 12 ++ .../esql/functions/signature/locate.svg | 1 + .../esql/functions/types/locate.asciidoc | 12 ++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/string.csv-spec | 112 +++++++++++ .../scalar/string/LocateEvaluator.java | 166 +++++++++++++++++ .../function/EsqlFunctionRegistry.java | 4 +- .../function/scalar/string/Locate.java | 140 ++++++++++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 15 ++ .../function/scalar/string/LocateTests.java | 175 ++++++++++++++++++ 13 files changed, 666 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/106899.yaml create mode 100644 docs/reference/esql/functions/description/locate.asciidoc create mode 100644 docs/reference/esql/functions/layout/locate.asciidoc create mode 100644 docs/reference/esql/functions/parameters/locate.asciidoc create mode 100644 docs/reference/esql/functions/signature/locate.svg create mode 100644 docs/reference/esql/functions/types/locate.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java diff --git a/docs/changelog/106899.yaml b/docs/changelog/106899.yaml new file mode 100644 index 0000000000000..a2db24236a47e --- /dev/null +++ b/docs/changelog/106899.yaml @@ -0,0 +1,6 @@ +pr: 106899 +summary: Add ES|QL Locate function +area: ES|QL +type: enhancement +issues: + - 106818 diff --git a/docs/reference/esql/functions/description/locate.asciidoc b/docs/reference/esql/functions/description/locate.asciidoc new file mode 100644 index 0000000000000..60a6d435e37b6 --- /dev/null +++ b/docs/reference/esql/functions/description/locate.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns an integer that indicates the position of a keyword substring within another string diff --git a/docs/reference/esql/functions/layout/locate.asciidoc b/docs/reference/esql/functions/layout/locate.asciidoc new file mode 100644 index 0000000000000..1017c7f844dd6 --- /dev/null +++ b/docs/reference/esql/functions/layout/locate.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-locate]] +=== `LOCATE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/locate.svg[Embedded,opts=inline] + +include::../parameters/locate.asciidoc[] +include::../description/locate.asciidoc[] +include::../types/locate.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/locate.asciidoc b/docs/reference/esql/functions/parameters/locate.asciidoc new file mode 100644 index 0000000000000..e48a7a891712c --- /dev/null +++ b/docs/reference/esql/functions/parameters/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +An input string + +`substring`:: +A substring to locate in the input string + +`start`:: +The start index diff --git a/docs/reference/esql/functions/signature/locate.svg b/docs/reference/esql/functions/signature/locate.svg new file mode 100644 index 0000000000000..2b7bc2dac0e86 --- /dev/null +++ b/docs/reference/esql/functions/signature/locate.svg @@ -0,0 +1 @@ +LOCATE(string,substring,start) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/locate.asciidoc b/docs/reference/esql/functions/types/locate.asciidoc new file mode 100644 index 0000000000000..895dce1335813 --- /dev/null +++ b/docs/reference/esql/functions/types/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | substring | start | result +keyword | keyword | integer | integer +keyword | text | integer | integer +text | keyword | integer | integer +text | text | integer | integer +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 746684aca3e38..5af5d9d3417de 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -30,6 +30,7 @@ double e() "integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" +"integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" "double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" "double log10(number:double|integer|long|unsigned_long)" "keyword|text ltrim(string:keyword|text)" @@ -138,6 +139,7 @@ greatest |first |"integer|long|double|boolean least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |[""] +locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["Base of logarithm. If `null`\, the function returns `null`. If not provided\, this function returns the natural logarithm (base e) of a value.", "Numeric expression. If `null`\, the function returns `null`."] log10 |number |"double|integer|long|unsigned_long" |Numeric expression. 
If `null`, the function returns `null`. ltrim |string |"keyword|text" |[""] @@ -247,6 +249,7 @@ greatest |Returns the maximum value from many columns. least |Returns the minimum value from many columns. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. +locate |Returns an integer that indicates the position of a keyword substring within another string log |Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning. log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. ltrim |Removes leading whitespaces from a string. @@ -357,6 +360,7 @@ greatest |"integer|long|double|boolean|keyword|text|ip|version" least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false +locate |integer |[false, false, true] |false |false log |double |[true, false] |false |false log10 |double |false |false |false ltrim |"keyword|text" |false |false |false @@ -447,5 +451,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -100 | 100 | 100 +101 | 101 | 101 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index d9c9e535c2c45..f22e1b2de7f6a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1168,3 +1168,115 @@ from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | gender:keyword | split:keyword M | [foo, bar] ; + +locate#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll"); + +a:keyword | a_ll:integer +hello | 3 +; + +locateFail#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "int"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateZeroStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 0); + +a:keyword | a_ll:integer +hello | 3 +; + +locateExactStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 3); + +a:keyword | a_ll:integer +hello | 3 +; + +locateLongerStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 10); + +a:keyword | a_ll:integer +hello | 0 +; + +locateLongerSubstr#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "farewell"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateSame#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "hello"); + +a:keyword | a_ll:integer +hello | 1 +; + +locateWithSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_s = substring(last_name, 2) | eval f_l = locate(last_name, f_s) | keep emp_no, last_name, f_s, f_l; +ignoreOrder:true + +emp_no:integer | last_name:keyword | f_s:keyword | f_l:integer +10001 | Facello | acello | 2 +10002 | Simmel | immel | 2 +10003 
| Bamford | amford | 2 +10004 | Koblick | oblick | 2 +10005 | Maliniak | aliniak | 2 +10006 | Preusig | reusig | 2 +10007 | Zielinski | ielinski | 2 +10008 | Kalloufi | alloufi | 2 +10009 | Peac | eac | 2 +10010 | Piveteau | iveteau | 2 +; + +locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 3) | eval f_l = locate(a, f_s); + +a:keyword | f_s:keyword | f_l:integer +🐱Meow!🐶Woof! | Meow!🐶Woof! | 3 +; + +locateNestedSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = substring(a, locate(a, "ll")); + +a:keyword | a_ll:keyword +hello | llo +; + +locateNestSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(substring(a, 2), "ll"); + +a:keyword | a_ll:integer +hello | 2 +; + +locateStats#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_l = locate(last_name, "ll") | stats min(f_l), max(f_l) by job_positions | sort job_positions | limit 5; + +min(f_l):integer | max(f_l):integer | job_positions:keyword +5 | 5 | Accountant +0 | 0 | Architect +0 | 0 | Head Human Resources +0 | 3 | Internship +3 | 3 | Junior Developer +; + +locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] +required_feature: esql.mv_warn + +from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; +ignoreOrder:true +warning:Line 1:80: evaluation of [locate(description, \"ate\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:80: java.lang.IllegalArgumentException: single-value function encountered multi-value + +l1:integer | l2:integer +2 | null +2 | null +null | 0 +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java new file mode 100644 index 0000000000000..24055ad44f624 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -0,0 +1,166 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. 
+ */ +public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final EvalOperator.ExpressionEvaluator start; + + private final DriverContext driverContext; + + public LocateEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, EvalOperator.ExpressionEvaluator start, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.start = start; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + try (IntBlock startBlock = (IntBlock) start.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + IntVector startVector = startBlock.asVector(); + if (startVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + return eval(page.getPositionCount(), strVector, substrVector, startVector).asBlock(); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock, + IntBlock startBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch), startBlock.getInt(startBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector, + IntVector startVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), 
substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr, start); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + private final EvalOperator.ExpressionEvaluator.Factory start; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr, + EvalOperator.ExpressionEvaluator.Factory start) { + this.source = source; + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public LocateEvaluator get(DriverContext context) { + return new LocateEvaluator(source, str.get(context), substr.get(context), start.get(context), context); + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 1a27c7b69c1e6..3db7ae3cac7b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -90,6 +90,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; @@ -174,7 +175,8 @@ private FunctionDefinition[][] functions() { def(StartsWith.class, StartsWith::new, "starts_with"), def(EndsWith.class, EndsWith::new, "ends_with"), def(ToLower.class, ToLower::new, "to_lower"), - def(ToUpper.class, ToUpper::new, "to_upper") }, + def(ToUpper.class, ToUpper::new, "to_upper"), + def(Locate.class, Locate::new, "locate") }, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java new file mode 100644 index 0000000000000..a1157fad6c46f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.UnicodeUtil; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +/** + * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. + */ +public class Locate extends EsqlScalarFunction implements OptionalArgument { + + private final Expression str; + private final Expression substr; + private final Expression start; + + @FunctionInfo( + returnType = "integer", + description = "Returns an integer that indicates the position of a keyword substring within another string" + ) + public Locate( + Source source, + @Param(name = "string", type = { "keyword", "text" }, description = "An input string") Expression str, + @Param( + name = "substring", + type = { "keyword", "text" }, + description = "A substring to locate in the input string" + ) Expression substr, + @Param(optional = true, name = "start", type = { "integer" }, description = "The start index") Expression start + ) { + super(source, start == null ? Arrays.asList(str, substr) : Arrays.asList(str, substr, start)); + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public DataType dataType() { + return DataTypes.INTEGER; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isString(str, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + resolution = isString(substr, sourceText(), SECOND); + if (resolution.unresolved()) { + return resolution; + } + + return start == null ? 
TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + } + + @Override + public boolean foldable() { + return str.foldable() && substr.foldable() && (start == null || start.foldable()); + } + + @Evaluator + static int process(BytesRef str, BytesRef substr, int start) { + if (str == null || substr == null || str.length < substr.length) { + return 0; + } + int codePointCount = UnicodeUtil.codePointCount(str); + int indexStart = indexStart(codePointCount, start); + String utf8ToString = str.utf8ToString(); + return 1 + utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + } + + @Evaluator(extraName = "NoStart") + static int process(BytesRef str, BytesRef substr) { + return process(str, substr, 0); + } + + private static int indexStart(int codePointCount, int start) { + // esql is 1-based when it comes to string manipulation. We treat start = 0 and 1 the same + // a negative value is relative to the end of the string + int indexStart; + if (start > 0) { + indexStart = start - 1; + } else if (start < 0) { + indexStart = codePointCount + start; // start is negative, so this is a subtraction + } else { + indexStart = start; // start == 0 + } + return Math.min(Math.max(0, indexStart), codePointCount); // sanitise string start index + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Locate(source(), newChildren.get(0), newChildren.get(1), start == null ? null : newChildren.get(2)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Locate::new, str, substr, start); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); + ExpressionEvaluator.Factory substrExpr = toEvaluator.apply(substr); + if (start == null) { + return new LocateNoStartEvaluator.Factory(source(), strExpr, substrExpr); + } + return new LocateEvaluator.Factory(source(), strExpr, substrExpr, toEvaluator.apply(start)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index a85ddac532241..4640f1a7168c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -113,6 +113,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; @@ -400,6 +401,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), + of(ScalarFunction.class, Locate.class, PlanNamedTypes::writeLocate, PlanNamedTypes::readLocate), 
of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), @@ -1592,6 +1594,19 @@ static void writeSubstring(PlanStreamOutput out, Substring substring) throws IOE out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); } + static Locate readLocate(PlanStreamInput in) throws IOException { + return new Locate(in.readSource(), in.readExpression(), in.readExpression(), in.readOptionalNamed(Expression.class)); + } + + static void writeLocate(PlanStreamOutput out, Locate locate) throws IOException { + out.writeSource(locate.source()); + List fields = locate.children(); + assert fields.size() == 2 || fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); + } + static Replace readReplace(PlanStreamInput in) throws IOException { return new Replace(Source.EMPTY, in.readExpression(), in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java new file mode 100644 index 0000000000000..c1d3df53ece60 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for {@link Locate} function. 
+ */ +public class LocateTests extends AbstractFunctionTestCase { + public LocateTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add( + supplier( + "keywords", + DataTypes.KEYWORD, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed keyword, text", + DataTypes.KEYWORD, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "texts", + DataTypes.TEXT, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed text, keyword", + DataTypes.TEXT, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + public void testToString() { + assertThat( + evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + field("start", DataTypes.INTEGER) + ) + ).get(driverContext()).toString(), + equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") + ); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); + } + + public void testPrefixString() { + assertThat(process("a tiger", "a t", 0), equalTo(1)); + assertThat(process("a tiger", "a", 0), equalTo(1)); + assertThat(process("界世", "界", 0), equalTo(1)); + } + + public void testSuffixString() { + assertThat(process("a tiger", "er", 0), equalTo(6)); + assertThat(process("a tiger", "r", 0), equalTo(7)); + assertThat(process("世界", "界", 0), equalTo(2)); + } + + public void testMidString() { + assertThat(process("a tiger", "ti", 0), equalTo(3)); + assertThat(process("a tiger", "ige", 0), equalTo(4)); + assertThat(process("世界世", "界", 0), equalTo(2)); + } + + public void testOutOfRange() { + assertThat(process("a tiger", "tigers", 0), equalTo(0)); + assertThat(process("a tiger", "ipa", 0), equalTo(0)); + assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); + } + + public void testExactString() { + assertThat(process("a tiger", "a tiger", 0), equalTo(1)); + assertThat(process("tigers", "tigers", 0), equalTo(1)); + assertThat(process("界世", "界世", 0), equalTo(1)); + } + + private Integer process(String str, String substr, Integer start) { + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + new Literal(Source.EMPTY, start, DataTypes.INTEGER) + ) + ).get(driverContext()); + Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) + ) { + return block.isNull(0) ? 
Integer.valueOf(0) : ((Integer) toJavaObject(block, 0)); + } + } + + private static TestCaseSupplier supplier( + String name, + DataType firstType, + DataType secondType, + Supplier strValueSupplier, + Supplier substrValueSupplier, + Supplier startSupplier + ) { + return new TestCaseSupplier(name, List.of(firstType, secondType), () -> { + List values = new ArrayList<>(); + String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; + + String value = strValueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0")); + + String substrValue = substrValueSupplier.get(); + values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1")); + + Integer startValue = startSupplier.get(); + values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2")); + + int expectedValue = 1 + value.indexOf(substrValue); + return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue)); + }); + } +} From 2cfcefba9c39e7927ff672ffb0c6ec38a138a34f Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 5 Apr 2024 15:46:22 +0200 Subject: [PATCH 146/264] Update 8.13 known issues with JDK 22 bug / recommendation to downgrade (#107156) Update 8.13 known issues with JDK 22 bug / recommendation to downgrade. I'll follow up adding this to 8.13.1 as well once backported. --- docs/reference/release-notes/8.13.0.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 47855773d0543..ed3c159386a8a 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -7,6 +7,9 @@ Also see <>. [float] === Known issues +* Due to a bug in the bundled JDK 22 nodes might crash abruptly under high memory pressure. + We recommend https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html#jvm-version[downgrading to JDK 21.0.2] asap to mitigate the issue. + * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880]) + This affects clusters running version 8.10 or later, with an active downsampling From cd6af63022b159c837d4a9ea03699c68217c938d Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 5 Apr 2024 16:25:42 +0200 Subject: [PATCH 147/264] [Profiling] Annotate TODOs for 9.0.0 upgrade (#107150) As suggested by David in #106592, we're adding the annotation `@UpdateForV9` to all places in the Universal Profiling plugin that can be removed once we move to 9.0.0. 
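For illustration, the annotation is a single marker line on each piece of code that can go away in 9.0.0, e.g. (taken verbatim from one of the annotated spots in the diff below):

    @UpdateForV9 // remove this field - it is unused in Kibana
    private final long totalCPU;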
Closes #106592 Closes #106593 Closes #106596 Closes #106597 Closes #106681 --- .../java/org/elasticsearch/xpack/profiling/CO2Calculator.java | 3 +++ .../elasticsearch/xpack/profiling/GetFlamegraphResponse.java | 3 +++ .../elasticsearch/xpack/profiling/GetStackTracesResponse.java | 3 +++ .../java/org/elasticsearch/xpack/profiling/HostMetadata.java | 2 ++ .../java/org/elasticsearch/xpack/profiling/InstanceType.java | 2 ++ 5 files changed, 13 insertions(+) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index d69178f158a88..fcdc116cab725 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; + import java.util.Map; final class CO2Calculator { @@ -52,6 +54,7 @@ public double getAnnualCO2Tons(String hostID, long samples) { return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host); } + @UpdateForV9 // only allow OTEL semantic conventions private double getKiloWattsPerCore(HostMetadata host) { return switch (host.hostArchitecture) { // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java index 65b342abddd9d..c851b372cb2db 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -24,7 +25,9 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC private final int size; private final double samplingRate; private final long selfCPU; + @UpdateForV9 // remove this field - it is unused in Kibana private final long totalCPU; + @UpdateForV9 // remove this field - it is unused in Kibana private final long totalSamples; private final List> edges; private final List fileIds; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java index 89c0b4ab6b0fb..4cad1104f783b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.util.Collections; @@ -29,8 +30,10 @@ public class GetStackTracesResponse extends ActionResponse implements 
ChunkedToX private final Map stackFrames; @Nullable private final Map executables; + @UpdateForV9 // remove this field - it is unused in Kibana @Nullable private final Map stackTraceEvents; + @UpdateForV9 // remove this field - it is unused in Kibana private final int totalFrames; private final double samplingRate; private final long totalSamples; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e1e3e27e951bf..aae6615114f43 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -30,6 +31,7 @@ final class HostMetadata implements ToXContentObject { this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } + @UpdateForV9 // remove fallback to the "profiling.host.machine" field and remove it from the component template "profiling-hosts". public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java index 3aa0a79df13bc..d694ffd2cbebc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -73,6 +74,7 @@ public static InstanceType fromHostSource(Map source) { return new InstanceType(provider, region, null); } + @UpdateForV9 // remove this method private static InstanceType fromObsoleteHostSource(Map source) { // Check and handle AWS. String region = (String) source.get("ec2.placement.region"); From 95c7c0978020de5bac685802655bfab3f475e628 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Fri, 5 Apr 2024 15:32:16 +0100 Subject: [PATCH 148/264] Downgrade the bundled JDK to JDK 21.0.2 (#107137) This commit downgrades the bundled JDK to JDK 21.0.2. 
--- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 26 ++++++++++++------------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a2e8651810042..0883097e75aad 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,7 +2,7 @@ elasticsearch = 8.14.0 lucene = 9.10.0 bundled_jdk_vendor = openjdk -bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059 +bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7e2e781d3ce62..8978274e6df95 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1694,25 +1694,25 @@ - - - + + + - - + + - - - + + + - - + + - - - + + + From 667a0609714ef429172096f9174e6625713706ae Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 5 Apr 2024 17:09:03 +0200 Subject: [PATCH 149/264] Fix link in 8.13 release notes. (#107161) Use the id for the link instead. Relates to https://github.com/elastic/elasticsearch/pull/107159 --- docs/reference/release-notes/8.13.0.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index ed3c159386a8a..99ee4e5fb86e1 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -8,7 +8,7 @@ Also see <>. === Known issues * Due to a bug in the bundled JDK 22 nodes might crash abruptly under high memory pressure. - We recommend https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html#jvm-version[downgrading to JDK 21.0.2] asap to mitigate the issue. + We recommend <> asap to mitigate the issue. * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880]) + From da8151023fe6fa672184fde30b062a0a949d59d3 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Fri, 5 Apr 2024 09:50:28 -0600 Subject: [PATCH 150/264] GET /_all should return hidden indices with visible aliases (#106975) GET /_all should return hidden indices if they are accessible through a visible alias. This is currently the behavior when resolution occurs in the security layer. This change adds this behavior to name resolution when security is not used.
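In practice this means WildcardExpressionResolver.resolveAll now walks the indices lookup for aliases as well as data streams. A condensed sketch of the new filtering, simplified from the diff below (the full version also resolves the trivial wildcard into concrete indices first and merges the two sets):

    Stream<IndexAbstraction> ias = context.getState().metadata().getIndicesLookup().values().stream()
        .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false)
        .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context))
        .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName()));

where shouldIncludeIfAlias only accepts an abstraction of type ALIAS when the options do not ignore aliases, so a hidden index is now returned whenever a visible alias points at it.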
--- docs/changelog/106975.yaml | 5 + .../metadata/IndexNameExpressionResolver.java | 48 +++---- .../IndexNameExpressionResolverTests.java | 4 +- .../WildcardExpressionResolverTests.java | 123 ++++++++++++++++++ 4 files changed, 156 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/106975.yaml diff --git a/docs/changelog/106975.yaml b/docs/changelog/106975.yaml new file mode 100644 index 0000000000000..bd32b3574c4f9 --- /dev/null +++ b/docs/changelog/106975.yaml @@ -0,0 +1,5 @@ +pr: 106975 +summary: GET /_all should return hidden indices with visible aliases +area: Indices APIs +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index e8e8ca767cc34..b88292d4ed79b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -1246,32 +1246,36 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices and all the datastreams, considering the open/closed, system, and hidden context parameters. + * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ public static Collection resolveAll(Context context) { - List resolvedExpressions = resolveEmptyOrTrivialWildcard(context); - if (context.includeDataStreams() == false) { - return resolvedExpressions; - } else { - Stream dataStreamsAbstractions = context.getState() - .metadata() - .getIndicesLookup() - .values() - .stream() - .filter(indexAbstraction -> indexAbstraction.getType() == Type.DATA_STREAM) - .filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); - if (context.getOptions().expandWildcardsHidden() == false) { - dataStreamsAbstractions = dataStreamsAbstractions.filter(indexAbstraction -> indexAbstraction.isHidden() == false); - } - // dedup backing indices if expand hidden indices option is true - Set resolvedIncludingDataStreams = expandToOpenClosed(context, dataStreamsAbstractions).collect(Collectors.toSet()); - resolvedIncludingDataStreams.addAll(resolvedExpressions); - return resolvedIncludingDataStreams; + List concreteIndices = resolveEmptyOrTrivialWildcard(context); + + if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { + return concreteIndices; } + + Stream ias = context.getState() + .metadata() + .getIndicesLookup() + .values() + .stream() + .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) + .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + resolved.addAll(concreteIndices); + return resolved; + } + + private static boolean shouldIncludeIfDataStream(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.includeDataStreams() && ia.getType() == Type.DATA_STREAM; + } + + private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.getOptions().ignoreAliases() == false 
&& ia.getType() == Type.ALIAS; } /** diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index a1eeceba8a390..2fba37772ef94 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1217,9 +1217,9 @@ public void testHiddenAliasAndHiddenIndexResolution() { indexNames = indexNameExpressionResolver.concreteIndexNames(state, includeHiddenOptions, visibleAlias); assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); - // A total wildcards does not resolve the hidden index in this case + // total wildcards should also resolve both visible and hidden indices if there is a visible alias indexNames = indexNameExpressionResolver.concreteIndexNames(state, excludeHiddenOptions, "*"); - assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex)); + assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); } { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 2406eb8e76ab9..9980e1b27e48c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -280,6 +280,129 @@ public void testAll() { assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } + public void testAllAliases() { + { + // hidden index with hidden alias should not be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-hidden-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-hidden").isHidden(true)) // alias hidden + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + SystemIndexAccessLevel.NONE + ); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + } + + { + // hidden index with visible alias should be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-visible-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-visible").isHidden(false)) // alias visible + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + SystemIndexAccessLevel.NONE + ); + assertThat( + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), + equalTo(newHashSet("index-visible-alias")) + ); + } + } + + public void testAllDataStreams() { + + String dataStreamName = "foo_logs"; + long epochMillis = randomLongBetween(1580536800000L, 1583042400000L); + IndexMetadata firstBackingIndexMetadata = createBackingIndex(dataStreamName, 1, epochMillis).build(); + + IndicesOptions 
indicesAndAliasesOptions = IndicesOptions.fromOptions( + randomBoolean(), + randomBoolean(), + true, + false, + true, + false, + false, + false + ); + + { + // visible data streams should be returned by _all even though backing indices are hidden + Metadata.Builder mdBuilder = Metadata.builder() + .put(firstBackingIndexMetadata, true) + .put(DataStreamTestHelper.newInstance(dataStreamName, List.of(firstBackingIndexMetadata.getIndex()))); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + indicesAndAliasesOptions, + false, + false, + true, + SystemIndexAccessLevel.NONE, + NONE, + NONE + ); + + assertThat( + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), + equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + ); + } + + { + // if data stream itself is hidden, backing indices should not be returned + boolean hidden = true; + var dataStream = new DataStream( + dataStreamName, + List.of(firstBackingIndexMetadata.getIndex()), + 1, + null, + hidden, + false, + false, + false, + null, + null, + false, + List.of(), + null + ); + + Metadata.Builder mdBuilder = Metadata.builder().put(firstBackingIndexMetadata, true).put(dataStream); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + indicesAndAliasesOptions, + false, + false, + true, + SystemIndexAccessLevel.NONE, + NONE, + NONE + ); + + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + } + } + public void testResolveEmpty() { Metadata.Builder mdBuilder = Metadata.builder() .put( From e8747a6f48e0ab695a11bfe534b726da1862077c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 5 Apr 2024 09:08:17 -0700 Subject: [PATCH 151/264] Always enable fast path for load values from single segment (#106977) I've been looking to simplify the execution of the enrich lookup. There are several issues we need to address in the enrich process. One of the problems is that we currently perform lookups and extract enrich fields term by term. To ensure that these incoming changes don't degrade performance, we need to enable a fast path for a single segment when the document IDs are not sorted.
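Concretely, DocVector gains a singleSegment() check (constant shard and segment vectors) next to the existing singleSegmentNonDecreasing(), and ValuesSourceReaderOperator dispatches on it. A condensed sketch of the dispatch, simplified from the diff below:

    if (docVector.singleSegmentNonDecreasing()) {
        loadFromSingleLeaf(blocks, shard, segment, docs); // doc ids already sorted
    } else if (docVector.singleSegment()) {
        loadFromSingleLeafUnsorted(blocks, docVector);    // new fast path: one segment, unsorted doc ids
    } else {
        try (LoadFromMany many = new LoadFromMany(blocks, docVector)) {
            many.run();                                   // slow path across shard/segment pairs
        }
    }

The shard/segment/doc sort that builds the forward and backward maps also gets a cheaper comparator in the single-segment case, comparing doc ids only.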
--- .../elasticsearch/compute/data/DocVector.java | 76 ++++++++++++------ .../lucene/ValuesSourceReaderOperator.java | 77 ++++++++++++------- 2 files changed, 102 insertions(+), 51 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 9893ea1826945..2404217d11f95 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -84,6 +84,10 @@ public boolean singleSegmentNonDecreasing() { return singleSegmentNonDecreasing; } + public boolean singleSegment() { + return shards.isConstant() && segments.isConstant(); + } + private boolean checkIfSingleSegmentNonDecreasing() { if (getPositionCount() < 2) { return true; @@ -138,35 +142,57 @@ private void buildShardSegmentDocMapIfMissing() { for (int p = 0; p < forwards.length; p++) { forwards[p] = p; } - new IntroSorter() { - int pivot; - - @Override - protected void setPivot(int i) { - pivot = finalForwards[i]; - } - - @Override - protected int comparePivot(int j) { - int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j])); - if (cmp != 0) { - return cmp; + if (singleSegment()) { + new IntroSorter() { + int pivot; + + @Override + protected void setPivot(int i) { + pivot = finalForwards[i]; + } + + @Override + protected int comparePivot(int j) { + return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); } - cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j])); - if (cmp != 0) { - return cmp; + + @Override + protected void swap(int i, int j) { + int tmp = finalForwards[i]; + finalForwards[i] = finalForwards[j]; + finalForwards[j] = tmp; + } + }.sort(0, forwards.length); + } else { + new IntroSorter() { + int pivot; + + @Override + protected void setPivot(int i) { + pivot = finalForwards[i]; } - return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); - } - @Override - protected void swap(int i, int j) { - int tmp = finalForwards[i]; - finalForwards[i] = finalForwards[j]; - finalForwards[j] = tmp; - } - }.sort(0, forwards.length); + @Override + protected int comparePivot(int j) { + int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j])); + if (cmp != 0) { + return cmp; + } + cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j])); + if (cmp != 0) { + return cmp; + } + return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); + } + @Override + protected void swap(int i, int j) { + int tmp = finalForwards[i]; + finalForwards[i] = finalForwards[j]; + finalForwards[j] = tmp; + } + }.sort(0, forwards.length); + } backwards = new int[forwards.length]; for (int p = 0; p < forwards.length; p++) { backwards[forwards[p]] = p; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 08be21f95786f..eab2a314b2074 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -137,7 +137,22 @@ protected Page process(Page page) { boolean success = false; try { if 
(docVector.singleSegmentNonDecreasing()) { - loadFromSingleLeaf(blocks, docVector); + IntVector docs = docVector.docs(); + int shard = docVector.shards().getInt(0); + int segment = docVector.segments().getInt(0); + loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(i); + } + }); + } else if (docVector.singleSegment()) { + loadFromSingleLeafUnsorted(blocks, docVector); } else { try (LoadFromMany many = new LoadFromMany(blocks, docVector)) { many.run(); @@ -200,38 +215,24 @@ private boolean positionFieldWorkDocGuarteedAscending(int shard, int segment) { return true; } - private void loadFromSingleLeaf(Block[] blocks, DocVector docVector) throws IOException { - int shard = docVector.shards().getInt(0); - int segment = docVector.segments().getInt(0); - int firstDoc = docVector.docs().getInt(0); + private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoader.Docs docs) throws IOException { + int firstDoc = docs.get(0); positionFieldWork(shard, segment, firstDoc); - IntVector docs = docVector.docs(); - BlockLoader.Docs loaderDocs = new BlockLoader.Docs() { - @Override - public int count() { - return docs.getPositionCount(); - } - - @Override - public int get(int i) { - return docs.getInt(i); - } - }; StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; List rowStrideReaders = new ArrayList<>(fields.length); - ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.getPositionCount()); + ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.count()); LeafReaderContext ctx = ctx(shard, segment); try { for (int f = 0; f < fields.length; f++) { FieldWork field = fields[f]; BlockLoader.ColumnAtATimeReader columnAtATime = field.columnAtATime(ctx); if (columnAtATime != null) { - blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, loaderDocs); + blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, docs); } else { rowStrideReaders.add( new RowStrideReaderWork( field.rowStride(ctx), - (Block.Builder) field.loader.builder(loaderBlockFactory, docs.getPositionCount()), + (Block.Builder) field.loader.builder(loaderBlockFactory, docs.count()), f ) ); @@ -248,7 +249,7 @@ public int get(int i) { ); } StoredFieldLoader storedFieldLoader; - if (useSequentialStoredFieldsReader(docVector.docs())) { + if (useSequentialStoredFieldsReader(docs)) { storedFieldLoader = StoredFieldLoader.fromSpecSequential(storedFieldsSpec); trackStoredFields(storedFieldsSpec, true); } else { @@ -259,8 +260,8 @@ public int get(int i) { storedFieldLoader.getLoader(ctx, null), storedFieldsSpec.requiresSource() ? 
shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null ); - for (int p = 0; p < docs.getPositionCount(); p++) { - int doc = docs.getInt(p); + for (int p = 0; p < docs.count(); p++) { + int doc = docs.get(p); if (storedFields != null) { storedFields.advanceTo(doc); } @@ -278,6 +279,30 @@ public int get(int i) { } } + private void loadFromSingleLeafUnsorted(Block[] blocks, DocVector docVector) throws IOException { + IntVector docs = docVector.docs(); + int[] forwards = docVector.shardSegmentDocMapForwards(); + int shard = docVector.shards().getInt(0); + int segment = docVector.segments().getInt(0); + loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(forwards[i]); + } + }); + final int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int i = 0; i < blocks.length; i++) { + Block in = blocks[i]; + blocks[i] = in.filter(backwards); + in.close(); + } + } + private class LoadFromMany implements Releasable { private final Block[] target; private final IntVector shards; @@ -371,9 +396,9 @@ public void close() { * Is it more efficient to use a sequential stored field reader * when reading stored fields for the documents contained in {@code docIds}? */ - private boolean useSequentialStoredFieldsReader(IntVector docIds) { - return docIds.getPositionCount() >= SEQUENTIAL_BOUNDARY - && docIds.getInt(docIds.getPositionCount() - 1) - docIds.getInt(0) == docIds.getPositionCount() - 1; + private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs) { + int count = docs.count(); + return count >= SEQUENTIAL_BOUNDARY && docs.get(count - 1) - docs.get(0) == count - 1; } private void trackStoredFields(StoredFieldsSpec spec, boolean sequential) { From a9388e16addbb8146888e0cee68d7eea17a7013e Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 5 Apr 2024 20:16:39 +0300 Subject: [PATCH 152/264] ESQL: Fix bug when combining projections (#107131) Recursive aliases (eval x = 1, x1 = x) were not taken into account when combining projections, causing the target field to be lost (only the immediate intermediate named expression was used instead, which became invalid).
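The fix below resolves each lower alias transitively as it is collected, so a chain such as eval f1 = languages, f2 = f1 collapses to f2 = languages before the intermediate f1 is projected away. A minimal sketch of that resolution, with plain maps standing in for AttributeMap and NamedExpression (all names here are illustrative):

```java
import java.util.LinkedHashMap;
import java.util.Map;

class AliasChainDemo {
    public static void main(String[] args) {
        // lower projection: f1 = languages, f2 = f1
        String[][] lower = { { "f1", "languages" }, { "f2", "f1" } };
        Map<String, String> aliases = new LinkedHashMap<>();
        for (String[] def : lower) {
            // resolve the child through the aliases collected so far, then record it
            String resolvedChild = aliases.getOrDefault(def[1], def[1]);
            aliases.put(def[0], resolvedChild);
        }
        // an upper projection keeping f2 now resolves straight to the source field
        System.out.println(aliases.get("f2")); // prints "languages", not the dropped "f1"
    }
}
```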
Fix #107083 --- docs/changelog/107131.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 34 ++++++++++ .../esql/optimizer/LogicalPlanOptimizer.java | 27 +++++--- .../optimizer/LogicalPlanOptimizerTests.java | 67 +++++++++++++++++++ 4 files changed, 123 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/107131.yaml diff --git a/docs/changelog/107131.yaml b/docs/changelog/107131.yaml new file mode 100644 index 0000000000000..ebb696931777b --- /dev/null +++ b/docs/changelog/107131.yaml @@ -0,0 +1,6 @@ +pr: 107131 +summary: "ESQL: Fix bug when combining projections" +area: ES|QL +type: bug +issues: + - 107083 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 70d5053c64c45..6ccaf1eb0b6e7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1550,3 +1550,37 @@ s2point1:d | s_mv:i | languages:i 2.1 | 3 | 5 2.1 | 3 | null ; + +evalOverridingKey +FROM employees +| EVAL k = languages +| STATS c = COUNT() BY languages, k +| DROP k +| SORT languages +; + +c:l| languages:i +15 | 1 +19 | 2 +17 | 3 +18 | 4 +21 | 5 +10 | null +; + +evalMultipleOverridingKeys#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| EVAL k = languages, k1 = k +| STATS c = COUNT() BY languages, k, k1, languages +| DROP k +| SORT languages +; + +c:l | k1:i | languages:i +15 | 1 | 1 +19 | 2 | 2 +17 | 3 | 3 +18 | 4 | 4 +21 | 5 | 5 +10 | null | null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index d0375e0b50849..7fb2784bb044f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -403,11 +403,6 @@ private static List projectAggregations( List upperProjection, List lowerAggregations ) { - AttributeMap lowerAliases = new AttributeMap<>(); - for (NamedExpression ne : lowerAggregations) { - lowerAliases.put(ne.toAttribute(), Alias.unwrap(ne)); - } - AttributeSet seen = new AttributeSet(); for (NamedExpression upper : upperProjection) { Expression unwrapped = Alias.unwrap(upper); @@ -431,11 +426,18 @@ private static List combineProjections( List lower ) { - // collect aliases in the lower list - AttributeMap aliases = new AttributeMap<>(); + // collect named expressions declaration in the lower list + AttributeMap namedExpressions = new AttributeMap<>(); + // while also collecting the alias map for resolving the source (f1 = 1, f2 = f1, etc..) 
+ AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lower) { - if ((ne instanceof Attribute) == false) { - aliases.put(ne.toAttribute(), ne); + // record the alias + aliases.put(ne.toAttribute(), Alias.unwrap(ne)); + + // record named expression as is + if (ne instanceof Alias as) { + Expression child = as.child(); + namedExpressions.put(ne.toAttribute(), as.replaceChild(aliases.resolve(child, child))); } } List replaced = new ArrayList<>(); @@ -443,7 +445,7 @@ private static List combineProjections( // replace any matching attribute with a lower alias (if there's a match) // but clean-up non-top aliases at the end for (NamedExpression ne : upper) { - NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> aliases.resolve(a, a)); + NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> namedExpressions.resolve(a, a)); replaced.add((NamedExpression) trimNonTopLevelAliases(replacedExp)); } return replaced; @@ -476,7 +478,10 @@ private List replacePrunedAliasesUsedInGroupBy( var newGroupings = new ArrayList(groupings.size()); for (Expression group : groupings) { - newGroupings.add(group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a))); + var transformed = group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a)); + if (Expressions.anyMatch(newGroupings, g -> Expressions.equalsAsAttribute(g, transformed)) == false) { + newGroupings.add(transformed); + } } return newGroupings; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 63c2a33543073..eb3901f37b99a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -324,6 +324,52 @@ public void testCombineProjections() { var relation = as(limit.child(), EsRelation.class); } + /** + * Expects + * Project[[languages{f}#12 AS f2]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testCombineProjectionsWithEvalAndDrop() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1 + | keep f2 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("f2")); + assertThat(Expressions.name(Alias.unwrap(keep.projections().get(0))), is("languages")); + var limit = as(keep.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + + } + + /** + * Expects + * Project[[last_name{f}#26, languages{f}#25 AS f2, f4{r}#13]] + * \_Eval[[languages{f}#25 + 3[INTEGER] AS f4]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#28, emp_no{f}#22, first_name{f}#23, ..] 
+ */ + public void testCombineProjectionsWithEval() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1, f3 = 1 + 2, f4 = f3 + languages + | keep emp_no, *name, salary, f* + | drop f3 + | keep last_name, f2, f4 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("last_name", "f2", "f4")); + var eval = as(keep.child(), Eval.class); + assertThat(Expressions.names(eval.fields()), contains("f4")); + var add = as(Alias.unwrap(eval.fields().get(0)), Add.class); + var limit = as(eval.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + } + public void testCombineProjectionWithFilterInBetween() { var plan = plan(""" from test @@ -366,6 +412,27 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[last_name{f}#23, first_name{f}#20, k{r}#4],[SUM(salary{f}#24) AS s, last_name{f}#23, first_name{f}#20, first_n + * ame{f}#20 AS k]] + * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] + */ + public void testCombineProjectionWithAggregationAndEval() { + var plan = plan(""" + from test + | eval k = first_name, k1 = k + | stats s = sum(salary) by last_name, first_name, k, k1 + | keep s, last_name, first_name, k + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.aggregates()), contains("s", "last_name", "first_name", "k")); + assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name", "k")); + } + /** * Expects * TopN[[Order[x{r}#10,ASC,LAST]],1000[INTEGER]] From 5a9a9b87ac78556fd3dd0953630a8c6e96e9ea1c Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 5 Apr 2024 13:55:22 -0400 Subject: [PATCH 153/264] Adding tests and fixing test failure #106964 (#107118) closes: https://github.com/elastic/elasticsearch/issues/106964 --- .../80_dense_vector_indexed_by_default.yml | 27 +++++++++++++++++++ .../vectors/DenseVectorFieldMapperTests.java | 20 +++++++++----- 2 files changed, 41 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 784edfdac3469..407313a59c5e8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -147,3 +147,30 @@ setup: - match: { test_default_index_options.mappings.properties.vector.index: true } - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } +--- +"Default index options for dense_vector element type byte": + - skip: + version: ' - 8.13.99' + reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' + - do: + indices.create: + index: test_default_index_options + body: + mappings: + properties: + vector: + element_type: byte + type: dense_vector + dims: 5 + + - match: { acknowledged: true } + + - do: + indices.get_mapping: + index: test_default_index_options + + - match: { test_default_index_options.mappings.properties.vector.type: dense_vector } + - match: { 
test_default_index_options.mappings.properties.vector.dims: 5 } + - match: { test_default_index_options.mappings.properties.vector.index: true } + - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } + - is_false: test_default_index_options.mappings.properties.vector.index_options.type diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index e05cc92c8a76b..ec7d0a85f4486 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -20,7 +20,6 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; @@ -65,7 +64,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106964") public class DenseVectorFieldMapperTests extends MapperTestCase { private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0; @@ -81,23 +79,33 @@ public DenseVectorFieldMapperTests() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { - indexMapping(b, true); + indexMapping(b, IndexVersion.current()); } @Override protected void minimalMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { - indexMapping(b, indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION)); + indexMapping(b, indexVersion); } - private void indexMapping(XContentBuilder b, boolean indexedByDefault) throws IOException { + private void indexMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { b.field("type", "dense_vector").field("dims", 4); if (elementType != ElementType.FLOAT) { b.field("element_type", elementType.toString()); } - if (indexedByDefault || indexed) { + if (indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION) || indexed) { // Serialize if it's new index version, or it was not the default for previous indices b.field("index", indexed); } + if (indexVersion.onOrAfter(DenseVectorFieldMapper.DEFAULT_TO_INT8) + && indexed + && elementType.equals(ElementType.FLOAT) + && indexOptionsSet == false) { + b.startObject("index_options"); + b.field("type", "int8_hnsw"); + b.field("m", 16); + b.field("ef_construction", 100); + b.endObject(); + } if (indexed) { b.field("similarity", "dot_product"); if (indexOptionsSet) { From 12d819a92325fddf4313cbffc68f690eaf5fd9b2 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 5 Apr 2024 14:06:21 -0700 Subject: [PATCH 154/264] Revert toolchain changes for Java 22 upgrade. (#107164) This is a follow up to #107137 to also revert the Java toolchain changes necessary to support the required toolchains. 
--- .../AdoptiumJdkToolchainResolver.java | 40 ++++++++++++++----- .../OracleOpenJdkToolchainResolver.java | 6 ++- .../AdoptiumJdkToolchainResolverSpec.groovy | 8 +++- 3 files changed, 42 insertions(+), 12 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index 89a40711c9a19..0270ee22ca8c5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.compress.utils.Lists; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainDownload; import org.gradle.jvm.toolchain.JavaToolchainRequest; @@ -20,17 +21,17 @@ import java.io.IOException; import java.net.URI; import java.net.URL; +import java.util.Comparator; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.StreamSupport; import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri; public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver { // package protected for better testing - final Map> CACHED_RELEASES = new ConcurrentHashMap<>(); + final Map> CACHED_SEMVERS = new ConcurrentHashMap<>(); @Override public Optional resolve(JavaToolchainRequest request) { @@ -38,7 +39,7 @@ public Optional resolve(JavaToolchainRequest request) { return Optional.empty(); } AdoptiumVersionRequest versionRequestKey = toVersionRequest(request); - Optional versionInfo = CACHED_RELEASES.computeIfAbsent( + Optional versionInfo = CACHED_SEMVERS.computeIfAbsent( versionRequestKey, (r) -> resolveAvailableVersion(versionRequestKey) ); @@ -53,12 +54,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) { return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion); } - private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { + private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { ObjectMapper mapper = new ObjectMapper(); try { int languageVersion = requestKey.languageVersion.asInt(); URL source = new URL( - "https://api.adoptium.net/v3/info/release_names?architecture=" + "https://api.adoptium.net/v3/info/release_versions?architecture=" + requestKey.arch + "&image_type=jdk&os=" + requestKey.platform @@ -70,8 +71,14 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK + ")" ); JsonNode jsonNode = mapper.readTree(source); - JsonNode versionsNode = jsonNode.get("releases"); - return StreamSupport.stream(versionsNode.spliterator(), false).map(JsonNode::textValue).findFirst(); + JsonNode versionsNode = jsonNode.get("versions"); + return Optional.of( + Lists.newArrayList(versionsNode.iterator()) + .stream() + .map(this::toVersionInfo) + .max(Comparator.comparing(AdoptiumVersionInfo::semver)) + .get() + ); } catch (FileNotFoundException e) { // request combo not supported (e.g. 
aarch64 + windows return Optional.empty(); @@ -80,10 +87,21 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK } } - private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) { + private AdoptiumVersionInfo toVersionInfo(JsonNode node) { + return new AdoptiumVersionInfo( + node.get("build").asInt(), + node.get("major").asInt(), + node.get("minor").asInt(), + node.get("openjdk_version").asText(), + node.get("security").asInt(), + node.get("semver").asText() + ); + } + + private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { return URI.create( - "https://api.adoptium.net/v3/binary/version/" - + version + "https://api.adoptium.net/v3/binary/version/jdk-" + + versionInfo.semver + "/" + request.platform + "/" @@ -100,5 +118,7 @@ private boolean requestIsSupported(JavaToolchainRequest request) { return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM); } + record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {} + record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {} } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 162895fd486cf..818cb040c172e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -39,7 +39,11 @@ record JdkBuild(JavaLanguageVersion languageVersion, String version, String buil ); // package private so it can be replaced by tests - List builds = List.of(getBundledJdkBuild()); + List builds = List.of( + getBundledJdkBuild(), + // 22 release candidate + new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059") + ); private JdkBuild getBundledJdkBuild() { String bundledJdkVersion = VersionProperties.getBundledJdkVersion(); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index fe4a644ddfc1d..6383d577f027f 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,6 +11,7 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver +import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -37,7 +38,12 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) + 
resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), + 1, + 1, + "" + languageVersion.asInt() + ".1.1.1+37", + 0, "" + languageVersion.asInt() + ".1.1.1+37.1" + ))) } return resolver From 0f3ac367ac4a2f90546cfd97d76cb2d31068155b Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Sat, 6 Apr 2024 08:46:38 +0200 Subject: [PATCH 155/264] Rename values of `FailureStoreOptions` (#107062) With these new values, there's a better match between selecting failure stores in read and write operations. --- .../ml/anomaly-detection/apis/put-job.asciidoc | 2 +- .../datastreams/FailureStoreQueryParamIT.java | 10 +++++----- .../test/data_stream/200_rollover_failure_store.yml | 6 +++--- .../resources/rest-api-spec/api/indices.rollover.json | 2 +- .../elasticsearch/action/support/IndicesOptions.java | 4 ++-- .../action/admin/indices/RestRolloverIndexAction.java | 2 +- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 1ab5de76a94b0..e4e10e2ae2fc5 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -537,4 +537,4 @@ The API returns the following results: // TESTRESPONSE[s/"job_version" : "8.4.0"/"job_version" : $body.job_version/] // TESTRESPONSE[s/1656087283340/$body.$_path/] // TESTRESPONSE[s/"superuser"/"_es_test_root"/] -// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"false"/] +// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"exclude"/] diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index a6b235e8d566f..1d8de6b9ac5f6 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -75,7 +75,7 @@ public void testGetIndexApi() throws IOException { assertThat(indices.containsKey(failureStoreIndex), is(true)); } { - final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=false")); + final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=exclude")); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(1)); assertThat(indices.containsKey(backingIndex), is(true)); @@ -98,7 +98,7 @@ public void testGetIndexStatsApi() throws IOException { } { final Response statsResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_stats?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_stats?failure_store=include") ); Map indices = (Map) entityAsMap(statsResponse).get("indices"); assertThat(indices.size(), is(2)); @@ -124,7 +124,7 @@ public void testGetIndexSettingsApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ 
-150,7 +150,7 @@ public void testGetIndexMappingApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ -183,7 +183,7 @@ public void testPutIndexMappingApi() throws IOException { assertAcknowledged(client().performRequest(mappingRequest)); } { - final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true"); + final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include"); mappingRequest.setJsonEntity(""" { "properties": { diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 82c757fc4af76..46d46e8291ae9 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -32,7 +32,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } @@ -67,7 +67,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 @@ -96,7 +96,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index e04786ec14cf7..299c24f987d8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -64,7 +64,7 @@ "default":"false", "description":"If set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. Only allowed on data streams." 
}, - "failure_store":{ + "target_failure_store":{ "type":"boolean", "description":"If set to true, the rollover action will be applied on the failure store of the data stream.", "visibility": "feature_flag", diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 3b03b1cf0a4f6..e46a7bd5f0ec2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -417,8 +417,8 @@ public record FailureStoreOptions(boolean includeRegularIndices, boolean include ToXContentFragment { public static final String FAILURE_STORE = "failure_store"; - public static final String INCLUDE_ALL = "true"; - public static final String INCLUDE_ONLY_REGULAR_INDICES = "false"; + public static final String INCLUDE_ALL = "include"; + public static final String INCLUDE_ONLY_REGULAR_INDICES = "exclude"; public static final String INCLUDE_ONLY_FAILURE_INDICES = "only"; public static final FailureStoreOptions DEFAULT = new FailureStoreOptions(true, false); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index a0796c0f95639..98895a49fae6e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -54,7 +54,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); if (DataStream.isFailureStoreEnabled()) { - boolean failureStore = request.paramAsBoolean("failure_store", false); + boolean failureStore = request.paramAsBoolean("target_failure_store", false); if (failureStore) { rolloverIndexRequest.setIndicesOptions( IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) From 0faac52ac49f15d3dc9f3af3a2e46d3b8e5b3b00 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Sun, 7 Apr 2024 21:09:58 -0700 Subject: [PATCH 156/264] Metric for rejected indexing primary operations (per document) (#107080) * Fix number of rejected primary operations * Update docs/changelog/107080.yaml * Update test * Add metric 'es.indexing.primary_operations.document.rejections.ratio' + test * rm useless changelog * update docs * use -1 as a default version on unsupported version --- docs/reference/cluster/stats.asciidoc | 3 +- .../index/IndexingPressureIT.java | 3 +- .../metrics/NodeIndexingMetricsIT.java | 64 ++++++++++++++----- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/stats/ClusterStatsNodes.java | 5 +- .../elasticsearch/index/IndexingPressure.java | 5 +- .../index/stats/IndexingPressureStats.java | 22 ++++++- .../monitor/metrics/NodeMetrics.java | 12 ++-- .../cluster/node/stats/NodeStatsTests.java | 1 + .../cluster/stats/ClusterStatsNodesTests.java | 10 ++- .../ClusterStatsMonitoringDocTests.java | 3 +- 11 files changed, 99 insertions(+), 30 deletions(-) diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6d8a8f748fa0e..bdd3e166c22d6 100644 --- 
a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1821,7 +1821,8 @@ The API returns the following response: "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit" : "0b", "limit_in_bytes": 0 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java index 206aa57bc84b3..da89f3252bec0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java @@ -301,7 +301,8 @@ public void testWriteCanBeRejectedAtCoordinatingLevel() throws Exception { public void testWriteCanBeRejectedAtPrimaryLevel() throws Exception { final BulkRequest bulkRequest = new BulkRequest(); long totalRequestSize = 0; - for (int i = 0; i < 80; ++i) { + int numberOfIndexRequests = randomIntBetween(50, 100); + for (int i = 0; i < numberOfIndexRequests; ++i) { IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID()) .source(Collections.singletonMap("key", randomAlphaOfLength(50))); totalRequestSize += request.ramBytesUsed(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 6cca0ccb3fdf3..97f052367fbc6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -164,11 +164,11 @@ public void testNodeIndexingMetricsArePublishing() throws Exception { ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); - var primaryOperationsRejectionsRatio = getRecordedMetric( + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.rejections.ratio" + "es.indexing.primary_operations.document.rejections.ratio" ); - assertThat(primaryOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + assertThat(primaryOperationsDocumentRejectionsRatio.getDouble(), equalTo(0.0)); }); @@ -207,13 +207,19 @@ public void testCoordinatingRejectionMetricsArePublishing() throws Exception { "es.indexing.coordinating_operations.rejections.total" ); assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + + var coordinatingOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.coordinating_operations.rejections.ratio" + ); + assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(1.0)); }); } - public void testPrimaryRejectionMetricsArePublishing() throws Exception { + public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception { // setting low Indexing Pressure limits to trigger primary rejections - final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB").build()); + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "2KB").build()); // setting high Indexing Pressure limits to pass coordinating checks final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( Settings.builder().put(MAX_INDEXING_BYTES.getKey(), 
"10MB").build() @@ -227,19 +233,32 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { plugin.resetMeter(); final int numberOfShards = randomIntBetween(1, 5); - assertAcked(prepareCreate("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-one", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-two", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); - final BulkRequest bulkRequest = new BulkRequest(); - final int batchCount = randomIntBetween(50, 100); - for (int i = 0; i < batchCount; i++) { - bulkRequest.add(new IndexRequest("test").source("field", randomAlphaOfLength(2048))); + final BulkRequest bulkRequestOne = new BulkRequest(); + final int batchCountOne = randomIntBetween(50, 100); + for (int i = 0; i < batchCountOne; i++) { + bulkRequestOne.add(new IndexRequest("test-one").source("field", randomAlphaOfLength(3096))); } - // big batch should pass thru coordinating limit check but fail on primary - // note the bulk request is sent to coordinating client - final BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet(); - assertThat(bulkResponse.hasFailures(), equalTo(true)); - assertThat(Arrays.stream(bulkResponse.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), equalTo(true)); + final BulkRequest bulkRequestTwo = new BulkRequest(); + final int batchCountTwo = randomIntBetween(1, 5); + for (int i = 0; i < batchCountTwo; i++) { + bulkRequestTwo.add(new IndexRequest("test-two").source("field", randomAlphaOfLength(1))); + } + + // big batch should pass through coordinating gate but trip on primary gate + // note the bulk request is sent to coordinating node + final BulkResponse bulkResponseOne = client(coordinatingNode).bulk(bulkRequestOne).actionGet(); + assertThat(bulkResponseOne.hasFailures(), equalTo(true)); + assertThat( + Arrays.stream(bulkResponseOne.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), + equalTo(true) + ); + // small bulk request is expected to pass through primary indexing pressure gate + final BulkResponse bulkResponseTwo = client(coordinatingNode).bulk(bulkRequestTwo).actionGet(); + assertThat(bulkResponseTwo.hasFailures(), equalTo(false)); // simulate async apm `polling` call for metrics plugin.collect(); @@ -251,6 +270,16 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { "es.indexing.primary_operations.rejections.total" ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); + + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.primary_operations.document.rejections.ratio" + ); + // ratio of rejected documents vs all indexing documents + assertThat( + equals(primaryOperationsDocumentRejectionsRatio.getDouble(), (double) batchCountOne / (batchCountOne + batchCountTwo)), + equalTo(true) + ); }); } @@ -261,4 +290,9 @@ private static Measurement getRecordedMetric(Function> assertThat(measurements.size(), equalTo(1)); return measurements.get(0); } + + private static boolean equals(double expected, double actual) { + final double eps = .0000001; + return Math.abs(expected - actual) < eps; + } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 57a3afe083707..4a1bf691ea1b0 
100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,7 @@ static TransportVersion def(int id) { public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); + public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c421cdefbdbf4..6ffe7ac390260 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -782,6 +782,7 @@ static class IndexPressureStats implements ToXContentFragment { long coordinatingRejections = 0; long primaryRejections = 0; long replicaRejections = 0; + long primaryDocumentRejections = 0; long memoryLimit = 0; long totalCoordinatingOps = 0; @@ -811,6 +812,7 @@ static class IndexPressureStats implements ToXContentFragment { currentCoordinatingOps += nodeStatIndexingPressureStats.getCurrentCoordinatingOps(); currentPrimaryOps += nodeStatIndexingPressureStats.getCurrentPrimaryOps(); currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); + primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); } } indexingPressureStats = new IndexingPressureStats( @@ -831,7 +833,8 @@ static class IndexPressureStats implements ToXContentFragment { totalReplicaOps, currentCoordinatingOps, currentPrimaryOps, - currentReplicaOps + currentReplicaOps, + primaryDocumentRejections ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index d0bc8ad980dde..7696cf99b75cd 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -52,6 +52,7 @@ public class IndexingPressure { private final AtomicLong coordinatingRejections = new AtomicLong(0); private final AtomicLong primaryRejections = new AtomicLong(0); private final AtomicLong replicaRejections = new AtomicLong(0); + private final AtomicLong primaryDocumentRejections = new AtomicLong(0); private final long primaryAndCoordinatingLimits; private final long replicaLimits; @@ -136,6 +137,7 @@ public Releasable markPrimaryOperationStarted(int operations, long bytes, boolea long totalBytesWithoutOperation = totalBytes - bytes; this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); this.primaryRejections.getAndIncrement(); + this.primaryDocumentRejections.addAndGet(operations); throw new EsRejectedExecutionException( "rejected execution of primary operation [" + "coordinating_and_primary_bytes=" @@ -218,7 +220,8 @@ public IndexingPressureStats stats() { totalReplicaOps.get(), currentCoordinatingOps.get(), currentPrimaryOps.get(), - currentReplicaOps.get() + currentReplicaOps.get(), + primaryDocumentRejections.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index 
81eb40e6f6f61..608fa3128bf09 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -32,6 +32,7 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long coordinatingRejections; private final long primaryRejections; private final long replicaRejections; + private final long primaryDocumentRejections; private final long memoryLimit; // These fields will be used for additional back-pressure and metrics in the future @@ -70,6 +71,12 @@ public IndexingPressureStats(StreamInput in) throws IOException { this.currentCoordinatingOps = 0; this.currentPrimaryOps = 0; this.currentReplicaOps = 0; + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + primaryDocumentRejections = in.readVLong(); + } else { + primaryDocumentRejections = -1L; + } } public IndexingPressureStats( @@ -90,7 +97,8 @@ public IndexingPressureStats( long totalReplicaOps, long currentCoordinatingOps, long currentPrimaryOps, - long currentReplicaOps + long currentReplicaOps, + long primaryDocumentRejections ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -111,6 +119,8 @@ public IndexingPressureStats( this.currentCoordinatingOps = currentCoordinatingOps; this.currentPrimaryOps = currentPrimaryOps; this.currentReplicaOps = currentReplicaOps; + + this.primaryDocumentRejections = primaryDocumentRejections; } @Override @@ -132,6 +142,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { out.writeVLong(memoryLimit); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + out.writeVLong(primaryDocumentRejections); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -206,6 +220,10 @@ public long getMemoryLimit() { return memoryLimit; } + public long getPrimaryDocumentRejections() { + return primaryDocumentRejections; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; @@ -219,6 +237,7 @@ public long getMemoryLimit() { private static final String COORDINATING_REJECTIONS = "coordinating_rejections"; private static final String PRIMARY_REJECTIONS = "primary_rejections"; private static final String REPLICA_REJECTIONS = "replica_rejections"; + private static final String PRIMARY_DOCUMENT_REJECTIONS = "primary_document_rejections"; private static final String LIMIT = "limit"; private static final String LIMIT_IN_BYTES = "limit_in_bytes"; @@ -246,6 +265,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(COORDINATING_REJECTIONS, coordinatingRejections); builder.field(PRIMARY_REJECTIONS, primaryRejections); builder.field(REPLICA_REJECTIONS, replicaRejections); + builder.field(PRIMARY_DOCUMENT_REJECTIONS, primaryDocumentRejections); builder.endObject(); builder.humanReadableField(LIMIT_IN_BYTES, LIMIT, ByteSizeValue.ofBytes(memoryLimit)); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 527acb8d4fcbc..e689898b05da6 
100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -621,7 +621,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerDoubleGauge( - "es.indexing.primary_operations.rejections.ratio", + "es.indexing.primary_operations.document.rejections.ratio", "Ratio of rejected primary operations", "ratio", () -> { @@ -629,13 +629,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { .map(NodeStats::getIndexingPressureStats) .map(IndexingPressureStats::getTotalPrimaryOps) .orElse(0L); - var totalPrimaryRejections = Optional.ofNullable(stats.getOrRefresh()) + var totalPrimaryDocumentRejections = Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getPrimaryRejections) + .map(IndexingPressureStats::getPrimaryDocumentRejections) .orElse(0L); - // rejections do not count towards `totalPrimaryOperations` - var totalOps = totalPrimaryOperations + totalPrimaryRejections; - return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryRejections / totalOps : 0.0); + // primary document rejections do not count towards `totalPrimaryOperations` + var totalOps = totalPrimaryOperations + totalPrimaryDocumentRejections; + return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryDocumentRejections / totalOps : 0.0); } ) ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e4b821fba7634..b91ea304c5da6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1038,6 +1038,7 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 0ee3b244ecf45..adba547f9b2ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -113,7 +113,7 @@ public void testIndexPressureStats() throws Exception { randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats), randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats) ); - long[] expectedStats = new long[12]; + long[] expectedStats = new long[13]; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats indexingPressureStats = nodeStat.getIndexingPressureStats(); if (indexingPressureStats != null) { @@ -130,8 +130,9 @@ public void testIndexPressureStats() throws Exception { expectedStats[8] += indexingPressureStats.getCoordinatingRejections(); expectedStats[9] += indexingPressureStats.getPrimaryRejections(); expectedStats[10] += indexingPressureStats.getReplicaRejections(); + expectedStats[11] += indexingPressureStats.getPrimaryDocumentRejections(); - expectedStats[11] += indexingPressureStats.getMemoryLimit(); + expectedStats[12] += 
indexingPressureStats.getMemoryLimit(); } } @@ -181,9 +182,12 @@ public void testIndexPressureStats() throws Exception { + "," + "\"replica_rejections\":" + expectedStats[10] + + "," + + "\"primary_document_rejections\":" + + expectedStats[11] + "}," + "\"limit_in_bytes\":" - + expectedStats[11] + + expectedStats[12] + "}" + "}}" ) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index cb270c7f19ae8..2c5485b8d467f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -730,7 +730,8 @@ public void testToXContent() throws IOException { "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit_in_bytes": 0 } From 29888ff7ef815f543b8e6f2650746182dec8346d Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Mon, 8 Apr 2024 08:09:35 +0200 Subject: [PATCH 157/264] [Profiling] Fix test assumption re. serialization (#107134) With this commit we use the same XContent API (`rawValue`) in the test as in the actual code in order to ensure the test expects the correct value (i.e. this was a test bug). Closes #107117 --- .../elasticsearch/xpack/profiling/TopNFunctionTests.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java index 3a91550767094..f30fd18443550 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java @@ -22,7 +22,6 @@ public class TopNFunctionTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107117") public void testToXContent() throws IOException { String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; int frameType = 1; @@ -56,8 +55,10 @@ public void testToXContent() throws IOException { .rawValue("2.2000") .field("total_annual_co2_tons") .rawValue("22.0000") - .field("self_annual_costs_usd", "12.0000") - .field("total_annual_costs_usd", "120.0000") + .field("self_annual_costs_usd") + .rawValue("12.0000") + .field("total_annual_costs_usd") + .rawValue("120.0000") .endObject(); XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); From f66ca2d697a3a6c958cec20883ef696ade7311b1 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 8 Apr 2024 11:26:24 +0300 Subject: [PATCH 158/264] Introduce new node feature for renaming health endpoint (#107154) The health API was available for experimentation under the [`_internal/_health`](https://www.elastic.co/guide/en/elasticsearch/reference/8.6/health-api.html) before it [became GA](https://www.elastic.co/guide/en/elasticsearch/reference/8.7/health-api.html) at `8.7.0`. For this reason we introduce another node feature to capture this change. 
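Since a mixed-version cluster can contain nodes that only serve the old path, upgrade tests must pick the endpoint from the capabilities the cluster advertises rather than from the version of the code under test. A minimal sketch of that gate, where the boolean stands in for the aggregated node-feature check the test below performs via clusterHasFeature("health.supports_health_report_api"):

```java
class HealthEndpointDemo {
    // Hypothetical helper: choose the health endpoint every node in the cluster can serve.
    static String healthPath(boolean supportsHealthReportApi) {
        return supportsHealthReportApi
            ? "_health_report"     // GA endpoint, 8.7.0 and later
            : "_internal/_health"; // experimental endpoint, 8.5.0 through 8.6.x
    }

    public static void main(String[] args) {
        // a cluster that still has pre-8.7.0 nodes falls back to the old path
        System.out.println(healthPath(false));
    }
}
```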
Fixes https://github.com/elastic/elasticsearch/issues/106933 --- .../elasticsearch/upgrades/HealthNodeUpgradeIT.java | 5 ++--- .../java/org/elasticsearch/health/HealthFeatures.java | 10 +++++++++- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 695f5d2a64bc7..0f210ee4b2450 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.hamcrest.Matchers; @@ -21,7 +20,6 @@ import static org.hamcrest.CoreMatchers.equalTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106933") public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { @@ -36,7 +34,8 @@ public void testHealthNode() throws Exception { assertThat(tasks, Matchers.containsString("health-node")); }); assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_health_report")); + String path = clusterHasFeature("health.supports_health_report_api") ? "_health_report" : "_internal/_health"; + Response response = client().performRequest(new Request("GET", path)); Map health_report = entityAsMap(response.getEntity()); assertThat(health_report.get("status"), equalTo("green")); }); diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 4b3bcf7e7278f..32e6c8f5ca849 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -18,6 +18,7 @@ public class HealthFeatures implements FeatureSpecification { public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); + public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @@ -28,6 +29,13 @@ public Set getFeatures() { @Override public Map getHistoricalFeatures() { - return Map.of(SUPPORTS_HEALTH, Version.V_8_5_0, SUPPORTS_SHARDS_CAPACITY_INDICATOR, Version.V_8_8_0); + return Map.of( + SUPPORTS_HEALTH, + Version.V_8_5_0, // health accessible via /_internal/_health + SUPPORTS_HEALTH_REPORT_API, + Version.V_8_7_0, // health accessible via /_health_report + SUPPORTS_SHARDS_CAPACITY_INDICATOR, + Version.V_8_8_0 + ); } } From 8ca6f506399eab246ceddc6bace404ee28491354 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Apr 2024 11:19:25 +0200 Subject: [PATCH 159/264] Remove unused buildShardSearchRequest from SearchPhaseContext (#107180) This method isn't used on the interface; it's only used as part of the abstract async action.
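Schematically the move looks like this (stand-in record types for illustration only; the real signatures and the alias-filter/index-boost logic appear in the diff below):

```java
// Stand-ins only; the real classes live in org.elasticsearch.action.search.
record SearchShardIterator(String shardId) {}

record ShardSearchRequest(String shardId) {}

interface SearchPhaseContext {
    // Before: the interface declared
    //   ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex);
    // so every implementation, including test fakes, had to stub it.
}

abstract class AbstractSearchAsyncAction implements SearchPhaseContext {
    // After: protected and final on the only class that actually builds shard requests.
    protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) {
        return new ShardSearchRequest(shardIt.shardId());
    }
}
```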
--- .../action/search/AbstractSearchAsyncAction.java | 10 ++++++++-- .../action/search/SearchPhaseContext.java | 10 ---------- .../action/search/MockSearchPhaseContext.java | 7 ------- 3 files changed, 8 insertions(+), 19 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 1da114adb34f6..1f8470b3bcd01 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -769,8 +769,14 @@ public final void onFailure(Exception e) { listener.onFailure(e); } - @Override - public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { + /** + * Builds a request for the initial search phase. + * + * @param shardIt the target {@link SearchShardIterator} + * @param shardIndex the index of the shard that is used in the coordinator node to + * tiebreak results with identical sort values + */ + protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index af9bcac8e3a33..c77c7e58efc7d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -15,7 +15,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import java.util.concurrent.Executor; @@ -115,15 +114,6 @@ default void sendReleaseSearchContext( } } - /** - * Builds an request for the initial search phase. - * - * @param shardIt the target {@link SearchShardIterator} - * @param shardIndex the index of the shard that is used in the coordinator node to - * tiebreak results with identical sort values - */ - ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex); - /** * Processes the phase transition from on phase to another. This method handles all errors that happen during the initial run execution * of the next phase.
If there are no successful operations in the context when this method is executed the search is aborted and diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index ed807091ae9a2..8bfd61b8d5b32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ -127,12 +126,6 @@ public SearchTransportService getSearchTransport() { return searchTransport; } - @Override - public ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { - Assert.fail("should not be called"); - return null; - } - @Override public void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPhase) { try { From 88306379ba3f6728af272886c0a953ef08df2fcf Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 8 Apr 2024 11:28:12 +0200 Subject: [PATCH 160/264] Increase KDF iteration count in `KeyStoreWrapper` (#107107) This PR increases the KDF iteration count for the keystore password. Additional context in ES-8063. --- .../cli/keystore/KeyStoreWrapperTests.java | 20 ++++++++++++++ .../keystore/UpgradeKeyStoreCommandTests.java | 22 +++++++++++++-- ...at-v5-with-password-elasticsearch.keystore | Bin 0 -> 199 bytes docs/changelog/107107.yaml | 5 ++++ .../common/settings/KeyStoreWrapper.java | 25 +++++++++++------- 5 files changed, 60 insertions(+), 12 deletions(-) create mode 100644 distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore create mode 100644 docs/changelog/107107.yaml diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index f6e3578811688..3004494262e6b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -457,6 +457,26 @@ public void testLegacyV3() throws GeneralSecurityException, IOException { assertThat(toByteArray(wrapper.getFile("file_setting")), equalTo("file_value".getBytes(StandardCharsets.UTF_8))); } + public void testLegacyV5() throws GeneralSecurityException, IOException { + final Path configDir = createTempDir(); + final Path keystore = configDir.resolve("elasticsearch.keystore"); + try ( + InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v5-with-password-elasticsearch.keystore"); + OutputStream os = Files.newOutputStream(keystore) + ) { + final byte[] buffer = new byte[4096]; + int readBytes; + while ((readBytes = is.read(buffer)) > 0) { + os.write(buffer, 0, readBytes); + } + } + final KeyStoreWrapper wrapper = KeyStoreWrapper.load(configDir); + assertNotNull(wrapper); + wrapper.decrypt("keystorepassword".toCharArray()); + assertThat(wrapper.getFormatVersion(), equalTo(5)); + assertThat(wrapper.getSettingNames(), equalTo(Set.of("keystore.seed"))); + } + public void testSerializationNewlyCreated() 
throws Exception { final KeyStoreWrapper wrapper = KeyStoreWrapper.create(); wrapper.setString("string_setting", "string_value".toCharArray()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index ae19fa0b94b83..979b118a887e5 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import java.io.InputStream; @@ -46,8 +47,20 @@ public void testKeystoreUpgradeV4() throws Exception { assertKeystoreUpgrade("/format-v4-elasticsearch.keystore", KeyStoreWrapper.V4_VERSION); } + public void testKeystoreUpgradeV5() throws Exception { + assertKeystoreUpgradeWithPassword("/format-v5-with-password-elasticsearch.keystore", KeyStoreWrapper.LE_VERSION); + } + private void assertKeystoreUpgrade(String file, int version) throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); + assertKeystoreUpgrade(file, version, null); + } + + private void assertKeystoreUpgradeWithPassword(String file, int version) throws Exception { + assertKeystoreUpgrade(file, version, "keystorepassword"); + } + + private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); @@ -56,11 +69,17 @@ private void assertKeystoreUpgrade(String file, int version) throws Exception { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } + if (password != null) { + terminal.addSecretInput(password); + terminal.addSecretInput(password); + } execute(); + terminal.reset(); + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); - afterUpgrade.decrypt(new char[0]); + afterUpgrade.decrypt(password != null ? 
password.toCharArray() : new char[0]); assertThat(afterUpgrade.getSettingNames(), hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); } } @@ -69,5 +88,4 @@ public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); } - } diff --git a/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore new file mode 100644 index 0000000000000000000000000000000000000000..0547db46eb1ef2f8270b72b34cb2551585a971b6 GIT binary patch literal 199 zcmcD&o+B=nnv+;ul9^nbnpl*ap_iRnSzMA|l*+)sz{)roNIC%Vlj)|G46`OpP@B2w z_KHsPxC3n(<+YD~A5612T9q0nn)0a5Cv@|t#hKFe@qeS!?|(gOZ*$|%CZ5CA-C3OP zRyD5Y0h(d+LG_z3SB1L4BOYFBAiq%YRFpH%x+CHWwQh11R(sC5C}kHf*vM(LV2R?~ wFcXK0c>S=C)(e(Q;$QdLq3inrey+=A@?9^ZnHXOm(3t!JXaX3-1vqa40Q;Rzng9R* literal 0 HcmV?d00001 diff --git a/docs/changelog/107107.yaml b/docs/changelog/107107.yaml new file mode 100644 index 0000000000000..5ca611befeb5d --- /dev/null +++ b/docs/changelog/107107.yaml @@ -0,0 +1,5 @@ +pr: 107107 +summary: Increase KDF iteration count in `KeyStoreWrapper` +area: Infra/CLI +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 6bdec2380c344..276775a868665 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -114,19 +114,18 @@ public void writeTo(StreamOutput out) throws IOException { /** The oldest metadata format version that can be read. */ private static final int MIN_FORMAT_VERSION = 3; - /** Legacy versions of the metadata written before the keystore data. */ - public static final int V2_VERSION = 2; public static final int V3_VERSION = 3; public static final int V4_VERSION = 4; /** The version where lucene directory API changed from BE to LE. */ public static final int LE_VERSION = 5; - public static final int CURRENT_VERSION = LE_VERSION; + public static final int HIGHER_KDF_ITERATION_COUNT_VERSION = 6; + public static final int CURRENT_VERSION = HIGHER_KDF_ITERATION_COUNT_VERSION; /** The algorithm used to derive the cipher key from a password. */ private static final String KDF_ALGO = "PBKDF2WithHmacSHA512"; /** The number of iterations to derive the cipher key. */ - private static final int KDF_ITERS = 10000; + private static final int KDF_ITERS = 210000; /** * The number of bits for the cipher key. @@ -155,6 +154,7 @@ public void writeTo(StreamOutput out) throws IOException { // 3: FIPS compliant algos, ES 6.3 // 4: remove distinction between string/files, ES 6.8/7.1 // 5: Lucene directory API changed to LE, ES 8.0 + // 6: increase KDF iteration count, ES 8.14 /** The metadata format version used to read the current keystore wrapper. 
*/ private final int formatVersion; @@ -317,8 +317,8 @@ public boolean hasPassword() { return hasPassword; } - private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException { - PBEKeySpec keySpec = new PBEKeySpec(password, salt, KDF_ITERS, CIPHER_KEY_BITS); + private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv, int kdfIters) throws GeneralSecurityException { + PBEKeySpec keySpec = new PBEKeySpec(password, salt, kdfIters, CIPHER_KEY_BITS); SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KDF_ALGO); SecretKey secretKey; try { @@ -337,6 +337,11 @@ private static Cipher createCipher(int opmode, char[] password, byte[] salt, byt return cipher; } + private static int getKdfIterationCountForVersion(int formatVersion) { + // iteration count was increased in version 6; it was 10,000 in previous versions + return formatVersion < HIGHER_KDF_ITERATION_COUNT_VERSION ? 10000 : KDF_ITERS; + } + /** * Decrypts the underlying keystore data. * @@ -365,7 +370,7 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio throw new SecurityException("Keystore has been corrupted or tampered with", e); } - Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv, getKdfIterationCountForVersion(formatVersion)); try ( ByteArrayInputStream bytesStream = new ByteArrayInputStream(encryptedBytes); CipherInputStream cipherStream = new CipherInputStream(bytesStream, cipher); @@ -403,11 +408,11 @@ private static byte[] readByteArray(DataInput input) throws IOException { } /** Encrypt the keystore entries and return the encrypted data. */ - private byte[] encrypt(char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException, IOException { + private byte[] encrypt(char[] password, byte[] salt, byte[] iv, int kdfIterationCount) throws GeneralSecurityException, IOException { assert isLoaded(); ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv, kdfIterationCount); try ( CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher); DataOutputStream output = new DataOutputStream(cipherStream) @@ -450,7 +455,7 @@ public synchronized void save(Path configDir, char[] password, boolean preserveP byte[] iv = new byte[12]; random.nextBytes(iv); // encrypted data - byte[] encryptedBytes = encrypt(password, salt, iv); + byte[] encryptedBytes = encrypt(password, salt, iv, getKdfIterationCountForVersion(CURRENT_VERSION)); // size of data block output.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); From 887d48dfc290a61229311899041234b64a8c8066 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Mon, 8 Apr 2024 12:06:08 +0200 Subject: [PATCH 161/264] Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag (#107122) A lot of places in the code use a `DataStream` constructor that sets the `rolloverOnWrite` flag to `false`. For some places, this was intentional, but for others, this was erroneous (and for most tests, it didn't matter much). This PR fixes the erroneous spots and avoids similar unintentional behavior in the future by removing the constructor in question altogether. 
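As a minimal sketch of the intended pattern (the record below is an illustration with three fields where the real `DataStream` has well over a dozen):

```java
// Illustration only: a drastically reduced DataStream.
record DataStream(String name, long generation, boolean rolloverOnWrite) {

    // Derived copies carry rolloverOnWrite over explicitly; the removed
    // convenience constructor silently reset it to false, which could cancel
    // a pending lazy rollover on the copy.
    DataStream withIncrementedGeneration() {
        return new DataStream(name, generation + 1, rolloverOnWrite);
    }
}
```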
Most use cases just want to copy the flag over and if you _do_ want to set the flag to false, it makes more sense to do so explicitly yourself rather than letting the constructor do it for you. An additional small bonus is that we have one less constructor for the `DataStream` class :). Follow up of [this](https://github.com/elastic/elasticsearch/pull/107035#discussion_r1549299287) discussion. --- docs/changelog/107122.yaml | 5 ++ .../datastreams/DataStreamIT.java | 3 +- .../DataStreamIndexSettingsProviderTests.java | 3 +- .../UpdateTimeSeriesRangeServiceTests.java | 3 +- .../action/GetDataStreamsResponseTests.java | 2 + .../DataStreamLifecycleServiceTests.java | 3 +- .../cluster/metadata/DataStream.java | 58 ++++++++----------- .../MetadataCreateDataStreamService.java | 1 + .../metadata/MetadataDataStreamsService.java | 1 + .../snapshots/RestoreService.java | 1 + .../DataStreamAutoShardingServiceTests.java | 1 + .../cluster/metadata/DataStreamTests.java | 47 ++++++++++----- .../MetadataDataStreamsServiceTests.java | 1 + .../WildcardExpressionResolverTests.java | 1 + .../metadata/DataStreamTestHelper.java | 7 ++- .../ccr/action/TransportPutFollowAction.java | 4 ++ ...StreamLifecycleUsageTransportActionIT.java | 4 +- 17 files changed, 88 insertions(+), 57 deletions(-) create mode 100644 docs/changelog/107122.yaml diff --git a/docs/changelog/107122.yaml b/docs/changelog/107122.yaml new file mode 100644 index 0000000000000..e227bfd45b939 --- /dev/null +++ b/docs/changelog/107122.yaml @@ -0,0 +1,5 @@ +pr: 107122 +summary: Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag +area: Data streams +type: bug +issues: [] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 6c06511ccfbd1..a0a391a0f019b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1791,7 +1791,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), - null + original.rolloverOnWrite(), + original.getAutoShardingEvent() ); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 01ad1bb09b20f..11446a2a2a761 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -315,7 +315,8 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), - null + ds.rolloverOnWrite(), + ds.getAutoShardingEvent() ) ); Metadata metadata = mb.build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index abd5132edde16..1c63deadf92a4 100644 --- 
a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -154,7 +154,8 @@ public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { d.getLifecycle(), d.isFailureStore(), d.getFailureIndices(), - null + false, + d.getAutoShardingEvent() ) ) .build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 2118c98b377bc..9fc646995bc0e 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -89,6 +89,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(), true, failureStores, + false, null ); @@ -199,6 +200,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(null, null, false), true, failureStores, + false, null ); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index d0456d669663d..a67fa72cb3079 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -296,7 +296,8 @@ public void testRetentionNotExecutedForTSIndicesWithinTimeBounds() { DataStreamLifecycle.newBuilder().dataRetention(0L).build(), dataStream.isFailureStore(), dataStream.getFailureIndices(), - null + dataStream.rolloverOnWrite(), + dataStream.getAutoShardingEvent() ) ); clusterState = ClusterState.builder(clusterState).metadata(builder).build(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 57ab7c431f7ea..364a1b31ceeba 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -119,40 +119,6 @@ public static boolean isFailureStoreEnabled() { @Nullable private final DataStreamAutoShardingEvent autoShardingEvent; - public DataStream( - String name, - List indices, - long generation, - Map metadata, - boolean hidden, - boolean replicated, - boolean system, - boolean allowCustomRouting, - IndexMode indexMode, - DataStreamLifecycle lifecycle, - boolean failureStore, - List failureIndices, - @Nullable DataStreamAutoShardingEvent autoShardingEvent - ) { - this( - name, - indices, - generation, - metadata, - hidden, - replicated, - system, - System::currentTimeMillis, - allowCustomRouting, - indexMode, - lifecycle, - failureStore, - failureIndices, - false, - autoShardingEvent - ); - } - public DataStream( String name, List indices, @@ -222,6 +188,7 @@ public DataStream( this.failureStore = failureStore; this.failureIndices = failureIndices; assert assertConsistent(this.indices); + assert replicated == false || rolloverOnWrite == false : "replicated data streams cannot be marked for lazy rollover"; 
this.rolloverOnWrite = rolloverOnWrite; this.autoShardingEvent = autoShardingEvent; } @@ -238,7 +205,22 @@ public DataStream( boolean allowCustomRouting, IndexMode indexMode ) { - this(name, indices, generation, metadata, hidden, replicated, system, allowCustomRouting, indexMode, null, false, List.of(), null); + this( + name, + indices, + generation, + metadata, + hidden, + replicated, + system, + allowCustomRouting, + indexMode, + null, + false, + List.of(), + false, + null + ); } private static boolean assertConsistent(List indices) { @@ -507,6 +489,7 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -544,6 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -646,6 +630,7 @@ public DataStream removeBackingIndex(Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -692,6 +677,7 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -753,6 +739,7 @@ public DataStream addBackingIndex(Metadata clusterMetadata, Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -810,6 +797,7 @@ public DataStream snapshot(Collection indicesInSnapshot) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 6d0b424cad8f2..3c3ff0d130f0a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -339,6 +339,7 @@ static ClusterState createDataStream( lifecycle == null && isDslOnlyMode ? 
DataStreamLifecycle.DEFAULT : lifecycle, template.getDataStreamTemplate().hasFailureStore(), failureIndices, + false, null ); Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(newDataStream); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 4006bc8d1a94a..c03d40984e11c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -213,6 +213,7 @@ static ClusterState updateDataLifecycle( lifecycle, dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ) ); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 4b6e3f30fe6fa..a597901d4600e 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -717,6 +717,7 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad dataStream.getLifecycle(), dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 0d1104279d3ce..70e291afcaf32 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -794,6 +794,7 @@ private DataStream createDataStream( null, false, List.of(), + false, autoShardingEvent ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 3187a3e391691..f086b52c1b491 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -96,8 +96,9 @@ protected DataStream mutateInstance(DataStream instance) { var lifecycle = instance.getLifecycle(); var failureStore = instance.isFailureStore(); var failureIndices = instance.getFailureIndices(); + var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - switch (between(0, 11)) { + switch (between(0, 12)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); case 2 -> generation = instance.getGeneration() + randomIntBetween(1, 10); @@ -110,7 +111,11 @@ protected DataStream mutateInstance(DataStream instance) { isHidden = true; } } - case 5 -> isReplicated = isReplicated == false; + case 5 -> { + isReplicated = isReplicated == false; + // Replicated data streams cannot be marked for lazy rollover. 
+ rolloverOnWrite = isReplicated == false && rolloverOnWrite; + } case 6 -> { if (isSystem == false) { isSystem = true; @@ -131,6 +136,10 @@ protected DataStream mutateInstance(DataStream instance) { failureStore = failureIndices.isEmpty() == false; } case 11 -> { + rolloverOnWrite = rolloverOnWrite == false; + isReplicated = rolloverOnWrite == false && isReplicated; + } + case 12 -> { autoShardingEvent = randomBoolean() && autoShardingEvent != null ? null : new DataStreamAutoShardingEvent( @@ -154,6 +163,7 @@ protected DataStream mutateInstance(DataStream instance) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -212,6 +222,7 @@ public void testRolloverUpgradeToTsdbDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -240,6 +251,7 @@ public void testRolloverDowngradeToRegularDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -616,19 +628,21 @@ public void testSnapshot() { postSnapshotIndices.removeAll(indicesToRemove); postSnapshotIndices.addAll(indicesToAdd); + var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = new DataStream( preSnapshotDataStream.getName(), postSnapshotIndices, preSnapshotDataStream.getGeneration() + randomIntBetween(0, 5), preSnapshotDataStream.getMetadata() == null ? null : new HashMap<>(preSnapshotDataStream.getMetadata()), preSnapshotDataStream.isHidden(), - preSnapshotDataStream.isReplicated() && randomBoolean(), + replicated, preSnapshotDataStream.isSystem(), preSnapshotDataStream.isAllowCustomRouting(), preSnapshotDataStream.getIndexMode(), preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + replicated == false && preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -670,6 +684,7 @@ public void testSnapshotWithAllBackingIndicesRemoved() { preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -1896,13 +1911,14 @@ private IndexMetadata createIndexMetadata(String indexName, IndexWriteLoad index public void testWriteFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), randomNonEmptyIndexInstances(), randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1910,7 +1926,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(noFailureStoreDataStream.getFailureStoreWriteIndex(), nullValue()); @@ -1921,7 +1937,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1929,7 +1945,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && 
randomBoolean(), null ); assertThat(failureStoreDataStreamWithEmptyFailureIndices.getFailureStoreWriteIndex(), nullValue()); @@ -1947,7 +1963,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1955,7 +1971,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.getFailureStoreWriteIndex(), is(writeFailureIndex)); @@ -1965,13 +1981,14 @@ public void testIsFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); List backingIndices = randomNonEmptyIndexInstances(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), backingIndices, randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1979,7 +1996,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -1994,7 +2011,7 @@ public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2002,7 +2019,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -2026,7 +2043,7 @@ public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2034,7 +2051,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.isFailureStoreIndex(writeFailureIndex.getName()), is(true)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 71306d7fe0aef..1fe1f6350445c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -357,6 +357,7 @@ public void testRemoveBrokenBackingIndexReference() { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), + original.rolloverOnWrite(), original.getAutoShardingEvent() ); var brokenState = ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(broken).build()).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 9980e1b27e48c..c7a30e3eae548 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -381,6 +381,7 @@ public void testAllDataStreams() { null, false, List.of(), + false, null ); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java 
b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 2980b8a48636a..c83caa617e16e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -143,6 +143,7 @@ public static DataStream newInstance( lifecycle, false, List.of(), + false, autoShardingEvent ); } @@ -169,6 +170,7 @@ public static DataStream newInstance( lifecycle, failureStores.size() > 0, failureStores, + false, null ); } @@ -352,13 +354,14 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time ); } + boolean replicated = randomBoolean(); return new DataStream( dataStreamName, indices, generation, metadata, randomBoolean(), - randomBoolean(), + replicated, false, // Some tests don't work well with system data streams, since these data streams require special handling timeProvider, randomBoolean(), @@ -366,7 +369,7 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time randomBoolean() ? DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() : null, failureStore, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), randomBoolean() ? new DataStreamAutoShardingEvent( indices.get(indices.size() - 1).getName(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 446e9abcd3e26..b3c059e933fcf 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -341,6 +341,9 @@ static DataStream updateLocalDataStream( remoteDataStream.getLifecycle(), remoteDataStream.isFailureStore(), remoteDataStream.getFailureIndices(), + // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense + // (and potentially even break things). 
+ false, remoteDataStream.getAutoShardingEvent() ); } else { @@ -395,6 +398,7 @@ static DataStream updateLocalDataStream( localDataStream.getLifecycle(), localDataStream.isFailureStore(), localDataStream.getFailureIndices(), + localDataStream.rolloverOnWrite(), localDataStream.getAutoShardingEvent() ); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java index bc97623c76970..c1b4a4bf27890 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java @@ -122,19 +122,21 @@ public void testAction() throws Exception { indices.add(index); } boolean systemDataStream = randomBoolean(); + boolean replicated = randomBoolean(); DataStream dataStream = new DataStream( randomAlphaOfLength(50), indices, randomLongBetween(0, 1000), Map.of(), systemDataStream || randomBoolean(), - randomBoolean(), + replicated, systemDataStream, randomBoolean(), IndexMode.STANDARD, lifecycle, false, List.of(), + replicated == false && randomBoolean(), null ); dataStreamMap.put(dataStream.getName(), dataStream); From a7b38394d9f12dcb930616f0162a11c5cad68961 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 8 Apr 2024 12:26:26 +0200 Subject: [PATCH 162/264] ESQL: Support ST_DISJOINT (#107007) * WIP Started developing ST_DISJOINT, initially based on ST_INTERSECTS * Fix functions list and add spatial point integration tests * Update docs/changelog/107007.yaml * More tests for shapes and cartesian-multigeoms * Some more tests to highlight issues with DISJOINT on cartesian point indices * Disable Lucene push-down for DISJOINT on cartesian point indices * Added docs for ST_DISJOINT * Support DISJOINT in the Lucene push-down code for cartesian point indices * Re-enable push-to-source for DISJOINT on cartesian_point indices * Fix docs example * Try to fix internal docs links which are not being rendered * Fixed disjoint on empty geometry * Added tests on empty linestring, and changed Lucene push-down to throw an exception In Lucene code only LineString can be empty, but in Elasticsearch even that is not allowed, resulting in parsing errors. So we cannot reach this code in the Lucene push-down, and we now throw an error instead. The tests now assert on the warnings. Note that for DISJOINT and INTERSECTS alike, the predicate fails, because the parsing error results in null, the function returns null, the predicate interprets this as false, and no documents match. This null-in-null-out rule means that DISJOINT and INTERSECTS give the same answer on invalid geometries.
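For reference, a representative query shape (the index, field, and polygon are taken from the integration tests added below). Because of the null-in-null-out rule above, rows whose geometries fail to parse evaluate to null and are filtered out:

```esql
FROM airports
| WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))"))
| STATS count = COUNT()
```

This counts the airports whose location falls entirely outside the given polygon.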
--- docs/changelog/107007.yaml | 5 + .../description/st_contains.asciidoc | 2 - .../description/st_disjoint.asciidoc | 5 + .../description/st_intersects.asciidoc | 2 - .../functions/description/st_within.asciidoc | 2 - .../functions/examples/st_disjoint.asciidoc | 13 + .../functions/layout/st_disjoint.asciidoc | 15 ++ .../functions/parameters/st_disjoint.asciidoc | 9 + .../esql/functions/signature/st_disjoint.svg | 1 + .../esql/functions/spatial-functions.asciidoc | 2 + .../esql/functions/st_contains.asciidoc | 2 +- .../esql/functions/st_disjoint.asciidoc | 27 ++ .../esql/functions/st_intersects.asciidoc | 1 + .../esql/functions/st_within.asciidoc | 2 +- .../esql/functions/types/st_disjoint.asciidoc | 16 ++ .../cartesian_multipolygons.csv-spec | 68 +++++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/spatial.csv-spec | 227 +++++++++++++++++ .../main/resources/spatial_shapes.csv-spec | 52 ++++ ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++++++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 +++++++++++ ...ntCartesianSourceAndConstantEvaluator.java | 132 ++++++++++ ...ointCartesianSourceAndSourceEvaluator.java | 152 +++++++++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++++++++ ...ntGeoPointDocValuesAndSourceEvaluator.java | 151 +++++++++++ ...DisjointGeoSourceAndConstantEvaluator.java | 132 ++++++++++ ...alDisjointGeoSourceAndSourceEvaluator.java | 152 +++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../scalar/spatial/SpatialContains.java | 2 - .../scalar/spatial/SpatialDisjoint.java | 239 ++++++++++++++++++ .../scalar/spatial/SpatialIntersects.java | 2 - .../spatial/SpatialRelatesFunction.java | 6 +- .../scalar/spatial/SpatialWithin.java | 2 - .../xpack/esql/io/stream/PlanNamedTypes.java | 6 + .../xpack/esql/plugin/EsqlFeatures.java | 8 +- .../querydsl/query/SpatialRelatesQuery.java | 39 ++- .../scalar/spatial/SpatialDisjointTests.java | 46 ++++ .../optimizer/PhysicalPlanOptimizerTests.java | 13 + 38 files changed, 1912 insertions(+), 27 deletions(-) create mode 100644 docs/changelog/107007.yaml create mode 100644 docs/reference/esql/functions/description/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_disjoint.svg create mode 100644 docs/reference/esql/functions/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/types/st_disjoint.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java 
create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java diff --git a/docs/changelog/107007.yaml b/docs/changelog/107007.yaml new file mode 100644 index 0000000000000..b2a755171725b --- /dev/null +++ b/docs/changelog/107007.yaml @@ -0,0 +1,5 @@ +pr: 107007 +summary: "ESQL: Support ST_DISJOINT" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index ed79fe3d9c1f3..678fde7f5d98b 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry contains the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc new file mode 100644 index 0000000000000..95ab02a39614a --- /dev/null +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the two geometries or geometry columns are disjoint. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index 3a36d79cbd123..b736ba29a6c8b 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the two geometries or geometry columns intersect. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index be52db3f694bf..890f28cb769b0 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry is within the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/examples/st_disjoint.asciidoc b/docs/reference/esql/functions/examples/st_disjoint.asciidoc new file mode 100644 index 0000000000000..192553e528a24 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_disjoint.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/layout/st_disjoint.asciidoc b/docs/reference/esql/functions/layout/st_disjoint.asciidoc new file mode 100644 index 0000000000000..a1eef41006f3e --- /dev/null +++ b/docs/reference/esql/functions/layout/st_disjoint.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +include::../parameters/st_disjoint.asciidoc[] +include::../description/st_disjoint.asciidoc[] +include::../types/st_disjoint.asciidoc[] +include::../examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc new file mode 100644 index 0000000000000..e87a0d0eb94f0 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/signature/st_disjoint.svg b/docs/reference/esql/functions/signature/st_disjoint.svg new file mode 100644 index 0000000000000..becd0be37e441 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_disjoint.svg @@ -0,0 +1 @@ +ST_DISJOINT(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index 739d6b2d6f58f..b6d178ddd624d 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -9,6 +9,7 @@ // tag::spatial_list[] * experimental:[] <> +* experimental:[] <> * experimental:[] <> * experimental:[] <> * experimental:[] <> @@ -16,6 +17,7 @@ // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_disjoint.asciidoc[] include::st_contains.asciidoc[] include::st_within.asciidoc[] include::st_x.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc index 07b1a11aa7247..110c4fe4ca9ec 100644 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_contains.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. 
include::types/st_contains.asciidoc[] include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc new file mode 100644 index 0000000000000..db89ca186a0ff --- /dev/null +++ b/docs/reference/esql/functions/st_disjoint.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +include::description/st_disjoint.asciidoc[] +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ + +include::types/st_disjoint.asciidoc[] +include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc index fbe313d10b0e7..d75a7f3a50e0f 100644 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -24,6 +24,7 @@ This means it is not possible to combine `geo_*` and `cartesian_*` parameters. Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). +This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ include::types/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc index 64adb91219c4a..0f0190a9de638 100644 --- a/docs/reference/esql/functions/st_within.asciidoc +++ b/docs/reference/esql/functions/st_within.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_within.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. include::types/st_within.asciidoc[] include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/types/st_disjoint.asciidoc b/docs/reference/esql/functions/types/st_disjoint.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_disjoint.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index a2411cfd7a335..aa6529c2d4319 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -52,6 +52,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSinglePolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) +; + #################################################################################################### # Test against a polygon smaller in size to the Bottom Left polygon @@ -99,6 +122,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSmallerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5); +; + #################################################################################################### # Test against a polygon similar in size to the entire test data @@ -175,6 +221,17 @@ id:l | name:keyword | shape:cartesian_shape 19 | Top left point | POINT(0.5 2.5) ; +whereDisjointLargerPolygon 
+required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + #################################################################################################### # Test against a polygon larger than all test data @@ -250,3 +307,14 @@ id:l | name:keyword | shape:cartesian_shape 18 | Top right point | POINT(2.5 2.5) 19 | Top left point | POINT(0.5 2.5) ; + +whereDisjointEvenLargerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 5af5d9d3417de..33b61c95ed0ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -66,6 +66,7 @@ double pi() "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" "boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" @@ -175,6 +176,7 @@ split |[string, delim] |["keyword|text", "keyword|te sqrt |number |"double|integer|long|unsigned_long" |[""] st_centroid |field |"geo_point|cartesian_point" |[""] st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_x |point |"geo_point|cartesian_point" |[""] @@ -285,6 +287,7 @@ split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. st_centroid |The centroid of a spatial field. st_contains |Returns whether the first geometry contains the second geometry. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. st_intersects |Returns whether the two geometries or geometry columns intersect. 
st_within |Returns whether the first geometry is within the second geometry. st_x |Extracts the x-coordinate from a point geometry. @@ -396,6 +399,7 @@ split |keyword sqrt |double |false |false |false st_centroid |"geo_point|cartesian_point" |false |false |true st_contains |boolean |[false, false] |false |false +st_disjoint |boolean |[false, false] |false |false st_intersects |boolean |[false, false] |false |false st_within |boolean |[false, false] |false |false st_x |double |false |false |false @@ -451,5 +455,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -101 | 101 | 101 +102 | 102 | 102 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index c1421d91dffa5..843b2674967fe 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -605,6 +605,91 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; +############################################### +# Tests for ST_DISJOINT on GEO_POINT type + +literalPolygonDisjointLiteralPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +pointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports +| WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 100), y = FLOOR(y / 100) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 94 +-1 | -1 | 67 +-1 | 0 | 201 +0 | 0 | 15 +1 | -1 | 33 +1 | 0 | 53 +; + +airportCityLocationPointDisjointCentroid +required_feature: esql.st_disjoint + +FROM airports_mp +| WHERE ST_DISJOINT(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (67.8581917192787 24.02956652920693) | POINT (67.81638333333332 24.048999999999996) | 6 +; + ############################################### # Tests for ST_CONTAINS on GEO_POINT type @@ -1167,6 +1252,148 @@ centroid:cartesian_point | count:long POINT (4783520.5 1661010.0) | 1 ; 
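+// A minimal illustrative sketch, not one of the upstream fixtures: the test name and the
+// expected count below are editorial assumptions. Since the docs define ST_DISJOINT as the
+// inverse of ST_INTERSECTS (A ⋂ B = ∅), no document can satisfy both predicates against the
+// same geometry, so the count must be zero; the 444/405 split later in this file partitions
+// the same index the same way.
+cartesianPointIntersectsAndDisjointIsEmpty
+required_feature: esql.st_disjoint
+
+FROM airports_web
+| EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))"))
+| EVAL disjoint = ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))"))
+| WHERE intersects AND disjoint
+| STATS count=COUNT()
+;
+
+count:long
+0
+;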
+cartesianPointIntersectsLiteralPolygonCount +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +444 +; + +############################################### +# Tests for ST_DISJOINT on CARTESIAN_POINT type + +literalPolygonDisjointLiteralCartesianPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalCartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralCartesianPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_CARTESIANPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalCartesianPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +cartesianPointDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +405 +; + +cartesianPointIntersectsDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL disjoint = ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() BY intersects, disjoint +| SORT intersects DESC, disjoint DESC +| KEEP intersects, disjoint, count +; + +intersects:boolean | disjoint:boolean | count:long +true | false | 444 +false | true | 405 +; + +cartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 10000000), y = FLOOR(y / 10000000) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 136 +-2 | 1 | 3 +-1 | -1 | 64 +-1 | 0 | 192 +-1 | 1 | 2 +; + +cartesianPointDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + +cartesianPointDisjointInvalidGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"Invalid Geometry\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: invalid + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS on CARTESIAN_POINT type diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index f010ed13370e0..6d0d15c398986 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -213,6 +213,27 @@ intersects:boolean true ; +############################################### +# Tests for ST_DISJOINT with GEO_SHAPE + +polygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +// tag::st_disjoint-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_disjoint-airport_city_boundaries[] +| SORT abbrev +| LIMIT 1 +; + +// tag::st_disjoint-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco de Juárez | POINT (-99.8825 16.8636) +// end::st_disjoint-airport_city_boundaries-result[] +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE @@ -422,6 +443,37 @@ wkt:keyword | shape:ca "POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) ; +############################################### +# Tests for ST_DISJOINT with CARTESIAN_SHAPE + +cartesianPolygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +| LIMIT 1 +; + +id:keyword | name:keyword | shape:cartesian_shape +ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -1760356.671722378, -2561396.0054164226) +; + +cartesianPolygonDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:28: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:28: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..62b5761cfd655 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..4f11da3c474a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory 
leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..adb5a33b83f3b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + 
leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..186a1299a4a98 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return 
"SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..675b6cc58197e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..1b22e67d11b25 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + 
} + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..1df80cf90bd10 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + 
} + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..9bdc60813ad67 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. + */ +public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if 
(leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3db7ae3cac7b5..178c714950b05 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -81,6 +81,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; 
+import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; @@ -189,6 +190,7 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), def(SpatialContains.class, SpatialContains::new, "st_contains"), + def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), def(SpatialWithin.class, SpatialWithin::new, "st_within"), def(StX.class, StX::new, "st_x"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 2a4915f38fb48..279f31e34ac95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -112,8 +112,6 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry contains the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. " - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java new file mode 100644 index 0000000000000..7833f93b6270f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_DISJOINT. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_DISJOINT and QueryRelation.DISJOINT. 
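+ * As a usage sketch (index and field names here are illustrative, not part of this change), the predicate
+ * is typically applied in a WHERE clause to keep rows whose geometry shares no point with the argument:
+ * {@code FROM airports | WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((0 0, 0 5, 5 5, 5 0, 0 0))"))}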
+ */ +public class SpatialDisjoint extends SpatialRelatesFunction { + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Disjoint") + ); + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Disjoint") + ); + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the two geometries or geometry columns are disjoint.", + examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") + ) + public SpatialDisjoint( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + private SpatialDisjoint(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.DISJOINT; + } + + @Override + public SpatialDisjoint withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialDisjoint(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialDisjoint replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialDisjoint(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialDisjoint::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialDisjointGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef 
leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 93965b0d3e9be..810e3206ada73 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -66,8 +66,6 @@ public class SpatialIntersects extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial", tag = "st_intersects-airports") ) public SpatialIntersects( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index b18a3ba4926f4..51109aee29482 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -116,10 +116,14 @@ protected TypeResolution resolveType( if (resolution.unresolved()) { return resolution; } - crsType = SpatialCrsType.fromDataType(spatialExpression.dataType()); + setCrsType(spatialExpression.dataType()); return TypeResolution.TYPE_RESOLVED; } + protected void setCrsType(DataType dataType) { + crsType = SpatialCrsType.fromDataType(dataType); + } + public static TypeResolution isSameSpatialType( DataType spatialDataType, Expression expression, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index a5ade4cfeb73c..ca285ca07e27b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -67,8 +67,6 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry is within the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 4640f1a7168c0..27e3c95bd123a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -103,6 +103,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -398,6 +399,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialDisjoint.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readDisjoint), of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), @@ -1504,6 +1506,10 @@ static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); } + static SpatialDisjoint readDisjoint(PlanStreamInput in) throws IOException { + return new SpatialDisjoint(Source.EMPTY, in.readExpression(), in.readExpression()); + } + static SpatialContains readContains(PlanStreamInput in) throws IOException { return new SpatialContains(Source.EMPTY, in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 29f0e04ef2b94..31c967fc3eee8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -80,6 +80,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within"); + /** + * Support for spatial aggregation {@code ST_DISJOINT}. Done in #107007. + */ + private static final NodeFeature ST_DISJOINT = new NodeFeature("esql.st_disjoint"); + /** * The introduction of the {@code VALUES} agg. 
*/ @@ -108,7 +113,8 @@ public Set getFeatures() { SPATIAL_SHAPES, ST_CENTROID, ST_INTERSECTS, - ST_CONTAINS_WITHIN + ST_CONTAINS_WITHIN, + ST_DISJOINT ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index a16c227f7f277..30cadb3e19dc8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -12,6 +12,8 @@ import org.apache.lucene.document.XYPointField; import org.apache.lucene.document.XYShape; import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -24,6 +26,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.GeoShapeQueryable; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -221,7 +224,16 @@ org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, M } /** - * This code is based on the ShapeQueryPointProcessor.shapeQuery() method + * This code is based on the ShapeQueryPointProcessor.shapeQuery() method, with additional support for two special cases: + *
* <ul> + * <li> + * DISJOINT queries (using {@code EXISTS && !INTERSECTS}, similar to {@code LegacyGeoShapeQueryProcessor.geoShapeQuery()}) + * </li> + * <li> + * CONTAINS queries (if the shape is a point, INTERSECTS is used, otherwise a MatchNoDocsQuery is built, + * similar to {@code LatLonPoint.makeContainsGeometryQuery()}) + * </li> + * </ul>
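+ * A DISJOINT query over a point field is therefore assembled roughly as the following pseudo-query
+ * (a sketch of the method below): {@code bool(MUST: exists(field), MUST_NOT: intersects(field, geometry))}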
    */ private static org.apache.lucene.search.Query pointShapeQuery( Geometry geometry, @@ -231,20 +243,28 @@ private static org.apache.lucene.search.Query pointShapeQuery( ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { - // A point field can never contain a non-point geometry - return new MatchNoDocsQuery(); + return new MatchNoDocsQuery("A point field can never contain a non-point geometry"); } final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); - org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + org.apache.lucene.search.Query intersects = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + if (relation == ShapeField.QueryRelation.DISJOINT) { + // XYPointField does not support DISJOINT queries, so we build one as EXISTS && !INTERSECTS + BooleanQuery.Builder bool = new BooleanQuery.Builder(); + org.apache.lucene.search.Query exists = ExistsQueryBuilder.newFilter(context, fieldName, false); + bool.add(exists, BooleanClause.Occur.MUST); + bool.add(intersects, BooleanClause.Occur.MUST_NOT); + return bool.build(); + } + + // Point-Intersects works for all cases except CONTAINS(shape) and DISJOINT, which are handled separately above if (hasDocValues) { final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); - query = new IndexOrDocValuesQuery(query, queryDocValues); + intersects = new IndexOrDocValuesQuery(intersects, queryDocValues); } - return query; + return intersects; } /** @@ -262,8 +282,7 @@ private static org.apache.lucene.search.Query shapeShapeQuery( throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); } if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } final XYGeometry[] luceneGeometries; try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java new file mode 100644 index 0000000000000..6e62af7e964f9 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_disjoint") +public class SpatialDisjointTests extends SpatialRelatesFunctionTestCase { + public SpatialDisjointTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialDisjointTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialDisjoint(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 180a8ff16f4eb..fb2362851e43c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -2933,6 +2934,7 @@ private record TestSpatialRelation(ShapeRelation relation, TestDataSource index, String function() { return switch (relation) { case INTERSECTS -> "ST_INTERSECTS"; + case DISJOINT -> "ST_DISJOINT"; case WITHIN -> "ST_WITHIN"; case CONTAINS -> "ST_CONTAINS"; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2942,6 +2944,7 @@ String function() { Class functionClass() { return switch (relation) { case INTERSECTS -> SpatialIntersects.class; + case DISJOINT -> SpatialDisjoint.class; case WITHIN -> literalRight ? SpatialWithin.class : SpatialContains.class; case CONTAINS -> literalRight ? 
SpatialContains.class : SpatialWithin.class; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2975,12 +2978,16 @@ public void testPushDownSpatialRelatesStringToSource() { TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), @@ -3027,10 +3034,16 @@ public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), From f0d445fea6826c7992488b31fa2e629ced0086db Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 8 Apr 2024 11:26:44 +0100 Subject: [PATCH 163/264] Further reduce usage of `SAME` threadpool name (#107096) Updates another couple of test suites that unnecessarily look up `DIRECT_EXECUTOR_SERVICE` from a `ThreadPool`. 
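Concretely, the affected handler code follows this shape (a minimal sketch showing
only the executor resolution; the surrounding test scaffolding is omitted):

    // before: asking the ThreadPool for the calling-thread executor by name
    final Executor executor = threadPool.executor(ThreadPool.Names.SAME);

    // after: using the shared direct executor constant, with no ThreadPool lookup
    final Executor executor = EsExecutors.DIRECT_EXECUTOR_SERVICE;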
Relates #106279 --- .../index/shard/IndexShardTests.java | 23 ++++++----- .../TransportServiceLifecycleTests.java | 38 +++++++++++++------ 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 97bf9f4e380fa..c2706a7a3cf22 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -148,7 +148,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -401,19 +400,19 @@ public void testRunUnderPrimaryPermitDelaysToExecutorWhenBlocked() throws Except indexShard.acquireAllPrimaryOperationsPermits(onAcquired, new TimeValue(Long.MAX_VALUE, TimeUnit.NANOSECONDS)); final Releasable permit = onAcquired.actionGet(); final CountDownLatch latch = new CountDownLatch(1); - final String executorOnDelay = randomFrom( - ThreadPool.Names.FLUSH, - ThreadPool.Names.GENERIC, - ThreadPool.Names.MANAGEMENT, - ThreadPool.Names.SAME - ); + final String expectedThreadPoolName; + final Executor executorOnDelay; + if (randomBoolean()) { + expectedThreadPoolName = ThreadPool.Names.GENERIC; + executorOnDelay = EsExecutors.DIRECT_EXECUTOR_SERVICE; + } else { + expectedThreadPoolName = randomFrom(ThreadPool.Names.FLUSH, ThreadPool.Names.GENERIC, ThreadPool.Names.MANAGEMENT); + executorOnDelay = threadPool.executor(expectedThreadPoolName); + } indexShard.runUnderPrimaryPermit(() -> { - final String expectedThreadPoolName = executorOnDelay.equals(ThreadPool.Names.SAME) - ? 
"generic" - : executorOnDelay.toLowerCase(Locale.ROOT); - assertThat(Thread.currentThread().getName(), containsString(expectedThreadPoolName)); + assertThat(Thread.currentThread().getName(), containsString('[' + expectedThreadPoolName + ']')); latch.countDown(); - }, e -> fail(e.toString()), threadPool.executor(executorOnDelay)); + }, e -> fail(e.toString()), executorOnDelay); permit.close(); latch.await(); // we could race and assert on the count before the permit is returned diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java index 87fbf113fc1c9..062cc71c9172d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java @@ -72,13 +72,13 @@ public void testHandlersCompleteAtShutdown() throws Exception { while (keepGoing.get() && requestPermits.tryAcquire()) { nodeB.transportService.sendRequest( randomFrom(random, nodeA, nodeB).transportService.getLocalNode(), - TestNode.ACTION_NAME_PREFIX + randomFrom(random, TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random), TransportRequest.Empty.INSTANCE, new TransportResponseHandler() { final AtomicBoolean completed = new AtomicBoolean(); - final String executor = randomFrom(random, TestNode.EXECUTOR_NAMES); + final Executor executor = nodeB.randomExecutor(); @Override public void handleResponse(TransportResponse.Empty response) { @@ -99,7 +99,7 @@ public TransportResponse.Empty read(StreamInput in) { @Override public Executor executor() { - return nodeB.transportService.getThreadPool().executor(executor); + return executor; } } ); @@ -130,7 +130,7 @@ public void testInternalSendExceptionForksToHandlerExecutor() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future, unusedReader(), deterministicTaskQueue::scheduleNow) @@ -149,7 +149,7 @@ public void testInternalSendExceptionForksToGenericIfHandlerDoesNotFork() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -178,7 +178,7 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { try { nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -197,14 +197,14 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { public void testInternalSendExceptionCompletesHandlerOnCallingThreadIfTransportServiceClosed() { final var nodeA = new TestNode("node-A"); - final var executor = nodeA.threadPool.executor(randomFrom(TestNode.EXECUTOR_NAMES)); + final var executor = nodeA.randomExecutor(); nodeA.close(); final var testThread = Thread.currentThread(); final var future = new 
PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -229,6 +229,7 @@ private static E getSendRequestException(Future future, } private static class Executors { + static final String DIRECT = "direct"; static final String SCALING_DROP_ON_SHUTDOWN = "scaling-drop-on-shutdown"; static final String SCALING_REJECT_ON_SHUTDOWN = "scaling-reject-on-shutdown"; static final String FIXED_BOUNDED_QUEUE = "fixed-bounded-queue"; @@ -238,8 +239,9 @@ private static class Executors { private static class TestNode implements Releasable { static final String ACTION_NAME_PREFIX = "internal:test/"; + static final String[] EXECUTOR_NAMES = new String[] { - ThreadPool.Names.SAME, + Executors.DIRECT, Executors.SCALING_DROP_ON_SHUTDOWN, Executors.SCALING_REJECT_ON_SHUTDOWN, Executors.FIXED_BOUNDED_QUEUE, @@ -293,10 +295,10 @@ public ExecutorService executor(String name) { null, emptySet() ); - for (final var executor : EXECUTOR_NAMES) { + for (final var executorName : EXECUTOR_NAMES) { transportService.registerRequestHandler( - ACTION_NAME_PREFIX + executor, - threadPool.executor(executor), + ACTION_NAME_PREFIX + executorName, + getExecutor(executorName), TransportRequest.Empty::new, (request, channel, task) -> { if (randomBoolean()) { @@ -311,6 +313,18 @@ public ExecutorService executor(String name) { transportService.acceptIncomingRequests(); } + Executor getExecutor(String executorName) { + return executorName.equals(Executors.DIRECT) ? EsExecutors.DIRECT_EXECUTOR_SERVICE : threadPool.executor(executorName); + } + + Executor randomExecutor() { + return getExecutor(randomFrom(TestNode.EXECUTOR_NAMES)); + } + + static String randomActionName(Random random) { + return ACTION_NAME_PREFIX + randomFrom(random, EXECUTOR_NAMES); + } + @Override public void close() { transportService.stop(); From cbdd3cde96675e3f1bc4f55bb0fe63afc9ce6618 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 8 Apr 2024 12:32:25 +0200 Subject: [PATCH 164/264] Update FROM option preference (#107199) This updates the preference from an imperative `_shards` to a preferential `_local` to make the test more resilient to different sharding scenarios. --- .../esql/qa/testFixtures/src/main/resources/from.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index 11fb0ab532945..c2c0b82f1a664 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -133,7 +133,7 @@ convertFromDatetimeWithOptions required_feature: esql.from_options // tag::convertFromDatetimeWithOptions[] - FROM employees OPTIONS "allow_no_indices"="false","preference"="_shards:0" + FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" | SORT emp_no | EVAL hire_double = to_double(hire_date) | KEEP emp_no, hire_date, hire_double From 54ca10e13044bc8f5363b4b96b02d107ba3d9001 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Apr 2024 12:40:04 +0200 Subject: [PATCH 165/264] Delete unused response deserialization code (#107181) With the transport client gone, lots of these constructors have become unused. 
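The deletions follow a single pattern (a representative sketch, not any one class
from this change): the StreamInput constructor used only for wire deserialization
goes away, while the value constructor and writeTo remain.

    // before: retained solely so the transport client could read the response
    //   ExampleResponse(StreamInput in) throws IOException { super(in); this.field = in.readString(); }
    // after: constructed from values; serialization still happens via writeTo
    public ExampleResponse(String field) {
        this.field = field;
    }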
Removing this dead code also allows making a lot of fields final as an added bonus. --- .../mustache/MultiSearchTemplateResponse.java | 21 --------- .../mustache/SearchTemplateResponse.java | 7 --- .../action/PainlessContextAction.java | 6 --- .../action/PainlessExecuteAction.java | 2 +- .../index/rankeval/RankEvalResponse.java | 6 +-- .../ClusterAllocationExplainResponse.java | 2 +- .../cluster/remote/RemoteInfoResponse.java | 6 --- .../cluster/state/ClusterStateResponse.java | 6 +-- .../GetStoredScriptResponse.java | 4 +- .../action/get/MultiGetItemResponse.java | 11 ----- .../action/get/MultiGetResponse.java | 5 --- .../ingest/SimulatePipelineResponse.java | 4 +- .../action/search/ClearScrollResponse.java | 7 --- .../search/ClosePointInTimeResponse.java | 7 --- .../search/OpenPointInTimeResponse.java | 6 --- .../TransportReplicationAction.java | 4 +- .../support/tasks/BaseTasksResponse.java | 4 +- .../termvectors/MultiTermVectorsResponse.java | 5 --- .../index/reindex/BulkByScrollResponse.java | 8 ++-- .../persistent/PersistentTaskResponse.java | 2 +- .../license/GetBasicStatusResponse.java | 2 +- .../license/GetTrialStatusResponse.java | 2 +- .../license/PostStartTrialResponse.java | 6 +-- .../protocol/xpack/XPackInfoResponse.java | 6 +-- .../xpack/graph/GraphExploreResponse.java | 37 ---------------- .../xpack/watcher/DeleteWatchResponse.java | 8 ---- .../xpack/watcher/PutWatchResponse.java | 43 +++---------------- .../core/action/XPackInfoFeatureResponse.java | 8 +--- .../action/XPackUsageFeatureResponse.java | 2 +- .../core/ilm/ExplainLifecycleResponse.java | 2 +- .../core/ilm/action/GetLifecycleAction.java | 2 +- .../core/ilm/action/GetStatusAction.java | 2 +- .../RemoveIndexLifecyclePolicyAction.java | 2 +- .../ml/action/EvaluateDataFrameAction.java | 4 +- .../xpack/core/ml/action/MlInfoAction.java | 7 +-- .../ml/action/PostCalendarEventsAction.java | 7 +-- .../core/ml/action/PreviewDatafeedAction.java | 5 --- .../core/ml/action/PutCalendarAction.java | 7 +-- .../action/PutDataFrameAnalyticsAction.java | 4 +- .../core/ml/action/PutDatafeedAction.java | 2 +- .../xpack/core/ml/action/PutFilterAction.java | 4 +- .../DelegatePkiAuthenticationResponse.java | 6 +-- .../action/apikey/UpdateApiKeyResponse.java | 6 --- .../OpenIdConnectAuthenticateResponse.java | 22 +++------- .../oidc/OpenIdConnectLogoutResponse.java | 8 +--- ...dConnectPrepareAuthenticationResponse.java | 19 ++------ .../privilege/GetPrivilegesResponse.java | 2 +- .../profile/ActivateProfileResponse.java | 6 --- .../action/profile/GetProfilesResponse.java | 7 --- .../profile/SuggestProfilesResponse.java | 7 --- .../security/action/role/PutRoleResponse.java | 8 +--- .../DeleteRoleMappingResponse.java | 6 --- .../rolemapping/GetRoleMappingsResponse.java | 12 +----- .../rolemapping/PutRoleMappingResponse.java | 8 +--- .../action/saml/SamlAuthenticateResponse.java | 27 +++--------- .../saml/SamlInvalidateSessionResponse.java | 14 ++---- .../action/saml/SamlLogoutResponse.java | 7 --- .../SamlPrepareAuthenticationResponse.java | 12 ++---- .../action/saml/SamlSpMetadataResponse.java | 8 +--- .../DeleteServiceAccountTokenResponse.java | 2 +- .../action/token/CreateTokenResponse.java | 12 +++--- .../action/user/DeleteUserResponse.java | 8 +--- .../action/user/GetUsersResponse.java | 26 ----------- .../action/user/HasPrivilegesResponse.java | 10 ++--- .../user/ProfileHasPrivilegesResponse.java | 2 +- .../security/action/user/PutUserResponse.java | 8 +--- .../ssl/action/GetCertificateInfoAction.java | 12 +----- 
.../actions/ack/AckWatchResponse.java | 8 +--- .../activate/ActivateWatchResponse.java | 8 +--- .../actions/execute/ExecuteWatchResponse.java | 7 --- .../actions/get/GetWatchResponse.java | 34 +++------------ .../rules/action/GetQueryRulesetAction.java | 1 - .../xpack/fleet/action/GetSecretResponse.java | 2 +- .../SamlInitiateSingleSignOnResponse.java | 20 +++------ .../idp/action/SamlMetadataResponse.java | 6 --- .../SamlValidateAuthnRequestResponse.java | 9 ---- .../sql/action/SqlTranslateResponse.java | 2 +- 77 files changed, 108 insertions(+), 539 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index b867fcfb905ea..11d060002955a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.AbstractRefCounted; @@ -38,16 +37,6 @@ public static class Item implements Writeable { private final SearchTemplateResponse response; private final Exception exception; - private Item(StreamInput in) throws IOException { - if (in.readBoolean()) { - this.response = new SearchTemplateResponse(in); - this.exception = null; - } else { - exception = in.readException(); - this.response = null; - } - } - public Item(SearchTemplateResponse response, Exception exception) { this.response = response; this.exception = exception; @@ -114,16 +103,6 @@ protected void closeInternal() { } }); - MultiSearchTemplateResponse(StreamInput in) throws IOException { - super(in); - items = in.readArray(Item::new, Item[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_0_0)) { - tookInMillis = in.readVLong(); - } else { - tookInMillis = -1L; - } - } - MultiSearchTemplateResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 39da4066a7859..2e62f6e9c96f4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.AbstractRefCounted; @@ -46,12 +45,6 @@ protected void closeInternal() { SearchTemplateResponse() {} - SearchTemplateResponse(StreamInput in) throws IOException { - super(in); - source = in.readOptionalBytesReference(); - response = in.readOptionalWriteable(SearchResponse::new); - } - public BytesReference getSource() { return source; 
} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index 6d88ff1e8db6a..3c183830afa6d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -108,12 +108,6 @@ public Response(List scriptContextNames, PainlessContextInfo painlessCon this.painlessContextInfo = painlessContextInfo; } - public Response(StreamInput in) throws IOException { - super(in); - scriptContextNames = in.readStringCollectionAsList(); - painlessContextInfo = in.readOptionalWriteable(PainlessContextInfo::new); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(scriptContextNames); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 7f5f1fe4f84ea..6ab5fc724c711 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -462,7 +462,7 @@ static boolean needDocumentAndIndex(ScriptContext scriptContext) { public static class Response extends ActionResponse implements ToXContentObject { - private Object result; + private final Object result; Response(Object result) { this.result = result; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 061d8292b3e5f..fe6da7fe1ce68 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -30,11 +30,11 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject { /** The overall evaluation result. 
*/ - private double metricScore; + private final double metricScore; /** details about individual ranking evaluation queries, keyed by their id */ - private Map details; + private final Map details; /** exceptions for specific ranking evaluation queries, keyed by their id */ - private Map failures; + private final Map failures; public RankEvalResponse(double metricScore, Map partialResults, Map failures) { this.metricScore = metricScore; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java index 39baf25f5dada..92413fc104be4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java @@ -22,7 +22,7 @@ */ public class ClusterAllocationExplainResponse extends ActionResponse implements ChunkedToXContentObject { - private ClusterAllocationExplanation cae; + private final ClusterAllocationExplanation cae; public ClusterAllocationExplainResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java index 89e1ede46d9a8..7a19e7b277a08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.remote; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.transport.RemoteConnectionInfo; import org.elasticsearch.xcontent.ToXContentObject; @@ -23,11 +22,6 @@ public final class RemoteInfoResponse extends ActionResponse implements ToXConte private final List infos; - RemoteInfoResponse(StreamInput in) throws IOException { - super(in); - infos = in.readCollectionAsImmutableList(RemoteConnectionInfo::new); - } - public RemoteInfoResponse(Collection infos) { this.infos = List.copyOf(infos); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java index f4cc4e2f8f5d2..fca9121a3a858 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java @@ -23,9 +23,9 @@ */ public class ClusterStateResponse extends ActionResponse { - private ClusterName clusterName; - private ClusterState clusterState; - private boolean waitForTimedOut = false; + private final ClusterName clusterName; + private final ClusterState clusterState; + private final boolean waitForTimedOut; public ClusterStateResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 24604a3977096..b33a718daee8a 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -26,8 +26,8 @@ public class GetStoredScriptResponse extends ActionResponse implements ToXConten public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); public static final ParseField SCRIPT = new ParseField("script"); - private String id; - private StoredScriptSource source; + private final String id; + private final StoredScriptSource source; public GetStoredScriptResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java index 9e755cfd7f081..b10471e75fcf8 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.get; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,16 +26,6 @@ public MultiGetItemResponse(GetResponse response, MultiGetResponse.Failure failu this.failure = failure; } - MultiGetItemResponse(StreamInput in) throws IOException { - if (in.readBoolean()) { - failure = new MultiGetResponse.Failure(in); - response = null; - } else { - response = new GetResponse(in); - failure = null; - } - } - /** * The index name of the document. */ diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index 4f548e227dcfb..3306ac874243c 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -109,11 +109,6 @@ public MultiGetResponse(MultiGetItemResponse[] responses) { this.responses = responses; } - MultiGetResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiGetItemResponse::new, MultiGetItemResponse[]::new); - } - public MultiGetItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index 396a5b63b3cd5..e7ad434e3ad7c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -20,9 +20,9 @@ import java.util.List; public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { - private String pipelineId; + private final String pipelineId; private boolean verbose; - private List results; + private final List results; public SimulatePipelineResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 3d00d18565756..de0d7b98ef851 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; @@ -34,12 +33,6 @@ public ClearScrollResponse(boolean succeeded, int numFreed) { this.numFreed = numFreed; } - public ClearScrollResponse(StreamInput in) throws IOException { - super(in); - succeeded = in.readBoolean(); - numFreed = in.readVInt(); - } - /** * @return Whether the attempt to clear a scroll was successful. */ diff --git a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java index d8cbfa53ee8ca..09cd96289416f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java @@ -8,11 +8,8 @@ package org.elasticsearch.action.search; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; -import java.io.IOException; - import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; @@ -21,10 +18,6 @@ public ClosePointInTimeResponse(boolean succeeded, int numFreed) { super(succeeded, numFreed); } - public ClosePointInTimeResponse(StreamInput in) throws IOException { - super(in); - } - @Override public RestStatus status() { if (isSucceeded() || getNumFreed() > 0) { diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java index 82cb158a0c59a..fd565ad4878bf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -24,11 +23,6 @@ public OpenPointInTimeResponse(String pointInTimeId) { this.pointInTimeId = Objects.requireNonNull(pointInTimeId, "Point in time parameter must be not null"); } - public OpenPointInTimeResponse(StreamInput in) throws IOException { - super(in); - pointInTimeId = in.readString(); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(pointInTimeId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d7ff0359bfd27..ac5b004886319 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1183,8 +1183,8 @@ public PendingReplicationActions getPendingReplicationActions() { } public static class ReplicaResponse extends ActionResponse implements ReplicationOperation.ReplicaResponse { - private long localCheckpoint; - private long globalCheckpoint; + private final long localCheckpoint; + private final long globalCheckpoint; ReplicaResponse(StreamInput in) throws IOException { super(in); diff --git 
a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index 3e8290ad4fb4a..0aa565c87b4cd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -35,8 +35,8 @@ public class BaseTasksResponse extends ActionResponse { public static final String TASK_FAILURES = "task_failures"; public static final String NODE_FAILURES = "node_failures"; - private List<TaskOperationFailure> taskFailures; - private List<FailedNodeException> nodeFailures; + private final List<TaskOperationFailure> taskFailures; + private final List<FailedNodeException> nodeFailures; public BaseTasksResponse(List<TaskOperationFailure> taskFailures, List<FailedNodeException> nodeFailures) { this.taskFailures = taskFailures == null ? Collections.emptyList() : List.copyOf(taskFailures); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java index b631d30cfd8bb..5789c4910db09 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java @@ -91,11 +91,6 @@ public MultiTermVectorsResponse(MultiTermVectorsItemResponse[] responses) { this.responses = responses; } - public MultiTermVectorsResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiTermVectorsItemResponse::new, MultiTermVectorsItemResponse[]::new); - } - public MultiTermVectorsItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java index 5bdeac75989a8..87173aceaa059 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java @@ -30,10 +30,10 @@ * Response used for actions that index many documents using a scroll request.
*/ public class BulkByScrollResponse extends ActionResponse implements ToXContentFragment { - private TimeValue took; - private BulkByScrollTask.Status status; - private List<Failure> bulkFailures; - private List<SearchFailure> searchFailures; + private final TimeValue took; + private final BulkByScrollTask.Status status; + private final List<Failure> bulkFailures; + private final List<SearchFailure> searchFailures; private boolean timedOut; static final String TOOK_FIELD = "took"; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java index 3560f3f28076d..fca6a9b2dde7d 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java @@ -19,7 +19,7 @@ * Response upon a successful start or an update of a persistent task */ public class PersistentTaskResponse extends ActionResponse { - private PersistentTask<?> task; + private final PersistentTask<?> task; public PersistentTaskResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java index 58e7be10cfa81..b0a162eb3ec14 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java @@ -17,7 +17,7 @@ public class GetBasicStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartBasic; + private final boolean eligibleToStartBasic; GetBasicStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java index 9126d22f33250..5bceab8edbe92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java @@ -17,7 +17,7 @@ public class GetTrialStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartTrial; + private final boolean eligibleToStartTrial; GetTrialStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java index 853c3d39e4121..87b49f3ef9e82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java @@ -48,9 +48,9 @@ RestStatus getRestStatus() { } - private Status status; - private Map<String, String[]> acknowledgeMessages; - private String acknowledgeMessage; + private final Status status; + private final Map<String, String[]> acknowledgeMessages; + private final String acknowledgeMessage; PostStartTrialResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 34126064997d6..5ba0e584d63bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++
b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -38,11 +38,11 @@ public class XPackInfoResponse extends ActionResponse implements ToXContentObjec // TODO move this constant to License.java once we move License.java to the protocol jar @Nullable - private BuildInfo buildInfo; + private final BuildInfo buildInfo; @Nullable - private LicenseInfo licenseInfo; + private final LicenseInfo licenseInfo; @Nullable - private FeatureSetsInfo featureSetsInfo; + private final FeatureSetsInfo featureSetsInfo; public XPackInfoResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java index 5bf5ecb445c57..ce872b1e406ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; @@ -23,8 +22,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; - /** * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects * (nodes and edges in common graph parlance). 
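// [Editor's sketch, not part of the patch] The hunk below deletes GraphExploreResponse's StreamInput constructor.
// The same pattern repeats throughout this commit: a response that is only ever constructed locally and written to
// the wire no longer needs a read path, and once the read path is gone every field can be assigned exactly once and
// marked final. A minimal illustration of the surviving shape, assuming a hypothetical ExampleResponse (the class
// and its field are invented here; ActionResponse and StreamOutput are the real types used above):
import java.io.IOException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamOutput;

public final class ExampleResponse extends ActionResponse {
    private final boolean acknowledged; // final: no deserialization constructor reassigns it

    public ExampleResponse(boolean acknowledged) {
        this.acknowledged = acknowledged;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeBoolean(acknowledged); // the write side is kept unchanged
    }
}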
@@ -43,40 +40,6 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb public GraphExploreResponse() {} - public GraphExploreResponse(StreamInput in) throws IOException { - super(in); - tookInMillis = in.readVLong(); - timedOut = in.readBoolean(); - - int size = in.readVInt(); - if (size == 0) { - shardFailures = ShardSearchFailure.EMPTY_ARRAY; - } else { - shardFailures = new ShardSearchFailure[size]; - for (int i = 0; i < shardFailures.length; i++) { - shardFailures[i] = readShardSearchFailure(in); - } - } - // read vertices - size = in.readVInt(); - vertices = new HashMap<>(); - for (int i = 0; i < size; i++) { - Vertex n = Vertex.readFrom(in); - vertices.put(n.getId(), n); - } - - size = in.readVInt(); - - connections = new HashMap<>(); - for (int i = 0; i < size; i++) { - Connection e = new Connection(in, vertices); - connections.put(e.getId(), e); - } - - returnDetailedInfo = in.readBoolean(); - - } - public GraphExploreResponse( long tookInMillis, boolean timedOut, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java index c6d673aec7d2a..ffeb0867723e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,13 +28,6 @@ public DeleteWatchResponse(String id, long version, boolean found) { this.found = found; } - public DeleteWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - found = in.readBoolean(); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java index 5c1f53bef3ef0..3bc3ebbd6f6f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -7,9 +7,7 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -18,22 +16,11 @@ public class PutWatchResponse extends ActionResponse implements ToXContentObject { - private String id; - private long version; - private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; - private boolean created; - - public PutWatchResponse() {} - - public PutWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - seqNo = in.readZLong(); - primaryTerm = in.readVLong(); - created = in.readBoolean(); - } + private final String id; + private 
final long version; + private final long seqNo; + private final long primaryTerm; + private final boolean created; public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, boolean created) { this.id = id; @@ -43,26 +30,6 @@ public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, b this.created = created; } - private void setId(String id) { - this.id = id; - } - - private void setVersion(long version) { - this.version = version; - } - - private void setSeqNo(long seqNo) { - this.seqNo = seqNo; - } - - private void setPrimaryTerm(long primaryTerm) { - this.primaryTerm = primaryTerm; - } - - private void setCreated(boolean created) { - this.created = created; - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java index 5d066a4dc6c50..66037054da685 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; @@ -15,12 +14,7 @@ public class XPackInfoFeatureResponse extends ActionResponse { - private FeatureSet info; - - public XPackInfoFeatureResponse(StreamInput in) throws IOException { - super(in); - info = new FeatureSet(in); - } + private final FeatureSet info; public XPackInfoFeatureResponse(FeatureSet info) { this.info = info; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java index 33dec1371dc86..71bb9993f3a29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java @@ -15,7 +15,7 @@ public class XPackUsageFeatureResponse extends ActionResponse { - private XPackFeatureSet.Usage usage; + private final XPackFeatureSet.Usage usage; public XPackUsageFeatureResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java index 97c7d6d8cb60d..755851b2ec88c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java @@ -30,7 +30,7 @@ public class ExplainLifecycleResponse extends ActionResponse implements ToXConte public static final ParseField INDICES_FIELD = new ParseField("indices"); - private Map<String, IndexLifecycleExplainResponse> indexResponses; + private final Map<String, IndexLifecycleExplainResponse> indexResponses; public ExplainLifecycleResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java
index 97d1fbf524963..d40220db794b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -40,7 +40,7 @@ protected GetLifecycleAction() { public static class Response extends ActionResponse implements ChunkedToXContentObject { - private List<LifecyclePolicyResponseItem> policies; + private final List<LifecyclePolicyResponseItem> policies; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java index f70510de382a9..c3022adb2f60a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java @@ -29,7 +29,7 @@ protected GetStatusAction() { public static class Response extends ActionResponse implements ToXContentObject { - private OperationMode mode; + private final OperationMode mode; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java index 68537fba3bfd1..e1171d9ab7dd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java @@ -47,7 +47,7 @@ public static class Response extends ActionResponse implements ToXContentObject PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), HAS_FAILURES_FIELD); } - private List<String> failedIndexes; + private final List<String> failedIndexes; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java index 298b6e71fc855..9a350c3c68adb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java @@ -188,8 +188,8 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, public static class Response extends ActionResponse implements ToXContentObject { - private String evaluationName; - private List<EvaluationMetricResult> metrics; + private final String evaluationName; + private final List<EvaluationMetricResult> metrics; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 1f1eb69ce606c..94bb7047bfe23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -47,7 +47,7 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse implements ToXContentObject { - private Map<String, Object> info; + private final Map<String, Object> info; public Response(Map<String, Object> info) { this.info
= info; @@ -57,11 +57,6 @@ public Response() { this.info = Collections.emptyMap(); } - public Response(StreamInput in) throws IOException { - super(in); - info = in.readGenericMap(); - } - public Map<String, Object> getInfo() { return info; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java index 6ca201fd8034a..d58b699fb6555 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -120,12 +120,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private List<ScheduledEvent> scheduledEvents; - - public Response(StreamInput in) throws IOException { - super(in); - in.readCollectionAsList(ScheduledEvent::new); - } + private final List<ScheduledEvent> scheduledEvents; public Response(List<ScheduledEvent> scheduledEvents) { this.scheduledEvents = scheduledEvents; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index d03a6d5c0c7c5..7988f885a27da 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -298,11 +298,6 @@ public static class Response extends ActionResponse implements ToXContentObject private final BytesReference preview; - public Response(StreamInput in) throws IOException { - super(in); - preview = in.readBytesReference(); - } - public Response(BytesReference preview) { this.preview = preview; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java index 755e610c4000c..eab89bb86022a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java @@ -117,12 +117,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private Calendar calendar; - - public Response(StreamInput in) throws IOException { - super(in); - calendar = new Calendar(in); - } + private final Calendar calendar; public Response(Calendar calendar) { this.calendar = calendar; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index c9da8aa4dd579..fe26cdb0377fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -156,14 +156,12 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DataFrameAnalyticsConfig config; + private final DataFrameAnalyticsConfig config; public Response(DataFrameAnalyticsConfig config) { this.config = config; } - Response() {} - public Response(StreamInput in)
throws IOException { super(in); config = new DataFrameAnalyticsConfig(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 67b1b2f9087e3..c234b24be5a9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -83,7 +83,7 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DatafeedConfig datafeed; + private final DatafeedConfig datafeed; public Response(DatafeedConfig datafeed) { this.datafeed = datafeed; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 50216b72f20d6..5b8dae53840b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -99,9 +99,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private MlFilter filter; - - Response() {} + private final MlFilter filter; Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java index dd356b8ab41ff..a5c8e10496b3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java @@ -30,12 +30,10 @@ public final class DelegatePkiAuthenticationResponse extends ActionResponse impl private static final ParseField EXPIRES_IN_FIELD = new ParseField("expires_in"); private static final ParseField AUTHENTICATION = new ParseField("authentication"); - private String accessToken; - private TimeValue expiresIn; + private final String accessToken; + private final TimeValue expiresIn; private Authentication authentication; - DelegatePkiAuthenticationResponse() {} - public DelegatePkiAuthenticationResponse(String accessToken, TimeValue expiresIn, Authentication authentication) { this.accessToken = Objects.requireNonNull(accessToken); // always store expiration in seconds because this is how we "serialize" to JSON and we need to parse back diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java index a1ed1c6092df8..334b395a05b31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
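// [Editor's sketch, not part of the patch] Several constructors deleted in this commit (for example in
// OpenIdConnectAuthenticateResponse and SamlAuthenticateResponse below) gated optional fields on the transport
// version before reading them. Reads and writes must stay symmetric per version; a hypothetical reader/writer
// pair, reusing the real TransportVersions.V_7_11_0 constant that appears in the deleted code
// (VersionGatedExample, readRealm and writeRealm are invented, and realm is assumed non-null when written):
import java.io.IOException;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

final class VersionGatedExample {
    // only read the field when the sender's wire format is new enough to contain it
    static String readRealm(StreamInput in) throws IOException {
        return in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0) ? in.readString() : null;
    }

    // mirror image on the write side: never send what an older reader cannot consume
    static void writeRealm(StreamOutput out, String realm) throws IOException {
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) {
            out.writeString(realm);
        }
    }
}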
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ToXContentObject; @@ -24,11 +23,6 @@ public UpdateApiKeyResponse(boolean updated) { this.updated = updated; } - public UpdateApiKeyResponse(StreamInput in) throws IOException { - super(in); - this.updated = in.readBoolean(); - } - public boolean isUpdated() { return updated; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java index 936a2892a6dbe..92b27826e8759 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -16,11 +15,11 @@ import java.io.IOException; public class OpenIdConnectAuthenticateResponse extends ActionResponse { - private String principal; - private String accessTokenString; - private String refreshTokenString; - private TimeValue expiresIn; - private Authentication authentication; + private final String principal; + private final String accessTokenString; + private final String refreshTokenString; + private final TimeValue expiresIn; + private final Authentication authentication; public OpenIdConnectAuthenticateResponse( Authentication authentication, @@ -36,17 +35,6 @@ public OpenIdConnectAuthenticateResponse( this.authentication = authentication; } - public OpenIdConnectAuthenticateResponse(StreamInput in) throws IOException { - super(in); - principal = in.readString(); - accessTokenString = in.readString(); - refreshTokenString = in.readString(); - expiresIn = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } - } - public String getPrincipal() { return principal; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java index 3dbfccf418c10..681ba15896778 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java @@ -7,19 +7,13 @@ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; public final class OpenIdConnectLogoutResponse extends ActionResponse { - private String endSessionUrl; - - public OpenIdConnectLogoutResponse(StreamInput in) throws IOException { - super(in); - this.endSessionUrl = in.readString(); - } + private final String endSessionUrl; public OpenIdConnectLogoutResponse(String endSessionUrl) { this.endSessionUrl = endSessionUrl; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index 88d8de80fe7a1..5dcfadd3dd01c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,19 +20,19 @@ */ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse implements ToXContentObject { - private String authenticationRequestUrl; + private final String authenticationRequestUrl; /* * The oAuth2 state parameter used for CSRF protection. */ - private String state; + private final String state; /* * String value used to associate a Client session with an ID Token, and to mitigate replay attacks. */ - private String nonce; + private final String nonce; /* * String value: name of the realm used to perform authentication. */ - private String realmName; + private final String realmName; public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state, String nonce, String realmName) { this.authenticationRequestUrl = authorizationEndpointUrl; @@ -42,16 +41,6 @@ public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUr this.realmName = realmName; } - public OpenIdConnectPrepareAuthenticationResponse(StreamInput in) throws IOException { - super(in); - authenticationRequestUrl = in.readString(); - state = in.readString(); - nonce = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - realmName = in.readString(); - } - } - public String getAuthenticationRequestUrl() { return authenticationRequestUrl; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java index 5f8755ef0c0da..7db9b26cfaa60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java @@ -20,7 +20,7 @@ */ public final class GetPrivilegesResponse extends ActionResponse { - private ApplicationPrivilegeDescriptor[] privileges; + private final ApplicationPrivilegeDescriptor[] privileges; public GetPrivilegesResponse(ApplicationPrivilegeDescriptor... 
privileges) { this.privileges = Objects.requireNonNull(privileges, "Application privileges cannot be null"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java index 8b8f905e59cbf..9d031d7c9065b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,11 +22,6 @@ public ActivateProfileResponse(Profile profile) { this.profile = profile; } - public ActivateProfileResponse(StreamInput in) throws IOException { - super(in); - this.profile = new Profile(in); - } - public Profile getProfile() { return profile; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java index 2dbf6743a5fde..77a411ad477f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,12 +28,6 @@ public GetProfilesResponse(List<Profile> profiles, Map<String, Exception> errors this.errors = Objects.requireNonNull(errors); } - public GetProfilesResponse(StreamInput in) throws IOException { - super(in); - this.profiles = in.readCollectionAsImmutableList(Profile::new); - this.errors = in.readMap(StreamInput::readException); - } - public List<Profile> getProfiles() { return profiles; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0574bb4b100a5..6eaeb4f02ac7f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -30,13 +30,6 @@ public SuggestProfilesResponse(ProfileHit[] profileHits, long tookInMillis, Tota this.totalHits = totalHits; } - public SuggestProfilesResponse(StreamInput in) throws IOException { - super(in); - this.profileHits = in.readArray(ProfileHit::new, ProfileHit[]::new); - this.tookInMillis = in.readVLong(); - this.totalHits = Lucene.readTotalHits(in); - } - public ProfileHit[] getProfileHits() { return profileHits; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java index 42b672cca6ad8..807c55643b425 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -20,12 +19,7 @@ */ public class PutRoleResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutRoleResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutRoleResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java index 12393213fa740..87e7f3785015f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -22,11 +21,6 @@ public class DeleteRoleMappingResponse extends ActionResponse implements ToXCont private boolean found = false; - public DeleteRoleMappingResponse(StreamInput in) throws IOException { - super(in); - found = in.readBoolean(); - } - public DeleteRoleMappingResponse(boolean found) { this.found = found; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java index 51689af1d7bc6..13a751829797f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -20,16 +19,7 @@ */ public class GetRoleMappingsResponse extends ActionResponse { - private ExpressionRoleMapping[] mappings; - - public GetRoleMappingsResponse(StreamInput in) throws IOException { - super(in); - int size = in.readVInt(); - mappings = new ExpressionRoleMapping[size]; - for (int i = 0; i < size; i++) { - mappings[i] = new ExpressionRoleMapping(in); - } - } + private final 
ExpressionRoleMapping[] mappings; public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) { this.mappings = mappings; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java index d04b0bbe1195f..5a80736dab66d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,12 +20,7 @@ */ public class PutRoleMappingResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutRoleMappingResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutRoleMappingResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java index 2cb0a76c2d6bf..71b5e93e60a2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -21,26 +20,12 @@ */ public final class SamlAuthenticateResponse extends ActionResponse { - private String principal; - private String tokenString; - private String refreshToken; - private String realm; - private TimeValue expiresIn; - private Authentication authentication; - - public SamlAuthenticateResponse(StreamInput in) throws IOException { - super(in); - principal = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - realm = in.readString(); - } - tokenString = in.readString(); - refreshToken = in.readString(); - expiresIn = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } - } + private final String principal; + private final String tokenString; + private final String refreshToken; + private final String realm; + private final TimeValue expiresIn; + private final Authentication authentication; public SamlAuthenticateResponse(Authentication authentication, String tokenString, String refreshToken, TimeValue expiresIn) { this.principal = authentication.getEffectiveSubject().getUser().principal(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java index 097d38c4f886f..42956cb34033d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,16 +16,9 @@ */ public final class SamlInvalidateSessionResponse extends ActionResponse { - private String realmName; - private int count; - private String redirectUrl; - - public SamlInvalidateSessionResponse(StreamInput in) throws IOException { - super(in); - realmName = in.readString(); - count = in.readInt(); - redirectUrl = in.readString(); - } + private final String realmName; + private final int count; + private final String redirectUrl; public SamlInvalidateSessionResponse(String realmName, int count, String redirectUrl) { this.realmName = realmName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java index 0c94e9a372481..8c3e8bd64b9bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -20,12 +19,6 @@ public final class SamlLogoutResponse extends ActionResponse { private final String requestId; private final String redirectUrl; - public SamlLogoutResponse(StreamInput in) throws IOException { - super(in); - requestId = in.readString(); - redirectUrl = in.readString(); - } - public SamlLogoutResponse(String requestId, String redirectUrl) { this.requestId = requestId; this.redirectUrl = redirectUrl; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java index 19f50266e5a51..9c7539361837e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,14 +16,9 @@ */ public final class SamlPrepareAuthenticationResponse extends ActionResponse { - private String realmName; - private String requestId; - private String redirectUrl; - - public SamlPrepareAuthenticationResponse(StreamInput in) throws IOException { - super(in); - redirectUrl = 
in.readString(); - } + private final String realmName; + private final String requestId; + private final String redirectUrl; public SamlPrepareAuthenticationResponse(String realmName, String requestId, String redirectUrl) { this.realmName = realmName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java index f0cce0ef5e675..b47c47d4d83d4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,12 +20,7 @@ public String getXMLString() { return XMLString; } - private String XMLString; - - public SamlSpMetadataResponse(StreamInput in) throws IOException { - super(in); - XMLString = in.readString(); - } + private final String XMLString; public SamlSpMetadataResponse(String XMLString) { this.XMLString = XMLString; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java index 5443deac03bd9..c8b7a1ea04e36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java @@ -18,7 +18,7 @@ public class DeleteServiceAccountTokenResponse extends ActionResponse implements ToXContentObject { - private boolean found; + private final boolean found; public DeleteServiceAccountTokenResponse(boolean found) { this.found = found; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java index 73719c7cae489..30522e3389a8a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java @@ -25,15 +25,13 @@ */ public final class CreateTokenResponse extends ActionResponse implements ToXContentObject { - private String tokenString; - private TimeValue expiresIn; - private String scope; - private String refreshToken; - private String kerberosAuthenticationResponseToken; + private final String tokenString; + private final TimeValue expiresIn; + private final String scope; + private final String refreshToken; + private final String kerberosAuthenticationResponseToken; private Authentication authentication; - CreateTokenResponse() {} - public CreateTokenResponse(StreamInput in) throws IOException { super(in); tokenString = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java index 4b07a3db7a038..ec34d54b0d56e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.user; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -20,12 +19,7 @@ */ public class DeleteUserResponse extends ActionResponse implements ToXContentObject { - private boolean found; - - public DeleteUserResponse(StreamInput in) throws IOException { - super(in); - found = in.readBoolean(); - } + private final boolean found; public DeleteUserResponse(boolean found) { this.found = found; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java index 6395d2a090afa..c7f51b21f4920 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java @@ -8,13 +8,11 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.user.InternalUser; import org.elasticsearch.xpack.core.security.user.User; import java.io.IOException; @@ -30,30 +28,6 @@ public class GetUsersResponse extends ActionResponse implements ToXContentObject @Nullable private final Map<String, String> profileUidLookup; - public GetUsersResponse(StreamInput in) throws IOException { - super(in); - int size = in.readVInt(); - if (size < 0) { - users = null; - } else { - users = new User[size]; - for (int i = 0; i < size; i++) { - final User user = Authentication.AuthenticationSerializationHelper.readUserFrom(in); - assert false == user instanceof InternalUser : "should not get internal user [" + user + "]"; - users[i] = user; - } - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) { - if (in.readBoolean()) { - profileUidLookup = in.readMap(StreamInput::readString); - } else { - profileUidLookup = null; - } - } else { - profileUidLookup = null; - } - } - public GetUsersResponse(Collection<User> users) { this(users, null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java index e59f588ffd65c..6c83d4b38ff89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java @@ -27,11 +27,11 @@ * Response for a {@link
HasPrivilegesRequest} */ public class HasPrivilegesResponse extends ActionResponse implements ToXContentObject { - private String username; - private boolean completeMatch; - private Map<String, Boolean> cluster; - private Set<ResourcePrivileges> index; - private Map<String, Collection<ResourcePrivileges>> application; + private final String username; + private final boolean completeMatch; + private final Map<String, Boolean> cluster; + private final Set<ResourcePrivileges> index; + private final Map<String, Collection<ResourcePrivileges>> application; public HasPrivilegesResponse() { this("", true, Collections.emptyMap(), Collections.emptyList(), Collections.emptyMap()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java index 8e8ff50e5b4ac..9977ad459b8fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java @@ -21,7 +21,7 @@ public class ProfileHasPrivilegesResponse extends ActionResponse implements ToXContentObject { - private Set<String> hasPrivilegeUids; + private final Set<String> hasPrivilegeUids; private final Map<String, Exception> errors; public ProfileHasPrivilegesResponse(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java index 86a25f8321176..fb6e699cd34c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.user; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,12 +20,7 @@ */ public class PutUserResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutUserResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutUserResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index cbb747272eebc..32d9725a909c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; /** @@ -52,16 +51,7 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse implements ToXContentObject { - private Collection<CertificateInfo> certificates; - - public Response(StreamInput in) throws IOException { - super(in); - this.certificates = new ArrayList<>(); - int count = in.readVInt(); - for (int i = 0; i < count; i++) { -
certificates.add(new CertificateInfo(in)); - } - } + private final Collection certificates; public Response(Collection certificates) { this.certificates = certificates; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java index 97f4b7d619191..60d20046c4f29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.ack; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; @@ -20,12 +19,7 @@ */ public class AckWatchResponse extends ActionResponse { - private WatchStatus status; - - public AckWatchResponse(StreamInput in) throws IOException { - super(in); - status = in.readBoolean() ? new WatchStatus(in) : null; - } + private final WatchStatus status; public AckWatchResponse(@Nullable WatchStatus status) { this.status = status; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java index 8091ba3b5ca26..8b0717c9855ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.activate; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; @@ -20,12 +19,7 @@ */ public class ActivateWatchResponse extends ActionResponse { - private WatchStatus status; - - public ActivateWatchResponse(StreamInput in) throws IOException { - super(in); - status = in.readBoolean() ? 
new WatchStatus(in) : null; - } + private final WatchStatus status; public ActivateWatchResponse(@Nullable WatchStatus status) { this.status = status; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java index 261a31211e497..cdb4503945904 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -26,12 +25,6 @@ public class ExecuteWatchResponse extends ActionResponse implements ToXContentOb private final String recordId; private final XContentSource recordSource; - public ExecuteWatchResponse(StreamInput in) throws IOException { - super(in); - recordId = in.readString(); - recordSource = XContentSource.readFrom(in); - } - public ExecuteWatchResponse(String recordId, BytesReference recordSource, XContentType contentType) { this.recordId = recordId; this.recordSource = new XContentSource(recordSource, contentType); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java index d1da1cc490f4b..789925f3832ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -22,32 +21,13 @@ public class GetWatchResponse extends ActionResponse implements ToXContentObject { - private String id; - private WatchStatus status; - private boolean found; - private XContentSource source; - private long version; - private long seqNo; - private long primaryTerm; - - public GetWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - found = in.readBoolean(); - if (found) { - status = new WatchStatus(in); - source = XContentSource.readFrom(in); - version = in.readZLong(); - seqNo = in.readZLong(); - primaryTerm = in.readVLong(); - } else { - status = null; - source = null; - version = Versions.NOT_FOUND; - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; - } - } + private final String id; + private final WatchStatus status; + private final boolean found; + private final XContentSource source; + private final long version; + private final long seqNo; + private final long primaryTerm; /** * ctor for missing watch diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java index 249cf66e39458..f7e6f166cf53f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java @@ -112,7 +112,6 @@ public static Request parse(XContentParser parser, String name) { public static class Response extends ActionResponse implements ToXContentObject { private final QueryRuleset queryRuleset; - private static final ParseField QUERY_RULESET_FIELD = new ParseField("queryRuleset"); public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java index 7bba867a74761..f4c26f5bcd094 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java @@ -19,7 +19,7 @@ public class GetSecretResponse extends ActionResponse implements ToXContentObject { - private String id; + private final String id; private final String value; public GetSecretResponse(StreamInput in) throws IOException { diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java index a7cd9c606b3c6..c0a5157557f58 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.XContentBuilder; @@ -16,20 +15,11 @@ public class SamlInitiateSingleSignOnResponse extends ActionResponse { - private String postUrl; - private String samlResponse; - private String entityId; - private String samlStatus; - private String error; - - public SamlInitiateSingleSignOnResponse(StreamInput in) throws IOException { - super(in); - this.entityId = in.readString(); - this.postUrl = in.readString(); - this.samlResponse = in.readString(); - this.samlStatus = in.readString(); - this.error = in.readOptionalString(); - } + private final String postUrl; + private final String samlResponse; + private final String entityId; + private final String samlStatus; + private final String error; public SamlInitiateSingleSignOnResponse( String entityId, diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java index 8e8a18f862bd7..6f1fb3cc32193 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java +++ 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,11 +16,6 @@ public class SamlMetadataResponse extends ActionResponse { private final String xmlString; - public SamlMetadataResponse(StreamInput in) throws IOException { - super(in); - this.xmlString = in.readString(); - } - public SamlMetadataResponse(String xmlString) { this.xmlString = Objects.requireNonNull(xmlString, "Metadata XML string must be provided"); } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java index e2b32c7e7023c..b09abb190ef7c 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,14 +20,6 @@ public class SamlValidateAuthnRequestResponse extends ActionResponse { private final boolean forceAuthn; private final Map authnState; - public SamlValidateAuthnRequestResponse(StreamInput in) throws IOException { - super(in); - this.spEntityId = in.readString(); - this.assertionConsumerService = in.readString(); - this.forceAuthn = in.readBoolean(); - this.authnState = in.readGenericMap(); - } - public SamlValidateAuthnRequestResponse(String spEntityId, String acs, boolean forceAuthn, Map authnState) { this.spEntityId = Objects.requireNonNull(spEntityId, "spEntityId is required for successful responses"); this.assertionConsumerService = Objects.requireNonNull(acs, "ACS is required for successful responses"); diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java index 62ae0d54eb584..b215a724c06f3 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java @@ -20,7 +20,7 @@ * Response for the sql action for translating SQL queries into ES requests */ public class SqlTranslateResponse extends ActionResponse implements ToXContentObject { - private SearchSourceBuilder source; + private final SearchSourceBuilder source; public SqlTranslateResponse(StreamInput in) throws IOException { super(in); From 436ffe7255b0201f535a0b69a63d097dd1665ec3 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:50:06 +0300 Subject: [PATCH 166/264] Re-enable unittest with additional tracing (#107202) Related to #105437 --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 2aff5257a6ebf..fc3e46882ef84 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -191,7 +191,7 @@ private void createIndex(String index, String alias, boolean isTimeSeries) throw createIndexWithSettings(client(), index, alias, settings, mapping); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105437") + @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105437") public void testRollupIndex() throws Exception { createIndex(index, alias, true); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); From cf8fe17766238f69bfc1ad0436b9d71adefdb981 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 8 Apr 2024 12:52:59 +0200 Subject: [PATCH 167/264] ES|QL: Make some REST tests deterministic (#107200) Adding an explicit SORT to two tests to make them more deterministic, especially for scenarios where the execution is on multiple nodes. --- .../elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java | 2 +- .../org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index 752775b20b0e3..e04435b715c99 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -167,7 +167,7 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { public void testMatchField_ImplicitFieldsList() throws IOException { Map result = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number") + new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number | sort number") ); var columns = List.of(Map.of("name", "number", "type", "long")); var values = List.of(List.of(1000), List.of(1000), List.of(5000)); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index b67432f491cf3..ab288de4ad27d 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -322,7 +322,7 @@ public void testNullInAggs() throws IOException { matchesMap().entry("values", List.of(List.of(1))).entry("columns", List.of(Map.of("name", "min(value)", "type", "long"))) ); - builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group"); + builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`"); result = 
runEsql(builder); assertMap( result, From 9496fa37469d543d73e558bdcf0d710d63d5833b Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 8 Apr 2024 07:24:52 -0400 Subject: [PATCH 168/264] Tidy up watcher logging and tests (#107175) --- .../smoketest/DocsClientYamlTestSuiteIT.java | 4 +- .../rest/yaml/ClientYamlTestResponse.java | 5 ++- .../rest/yaml/ESClientYamlSuiteTestCase.java | 3 +- .../EsqlClientYamlAsyncSubmitAndFetchIT.java | 2 +- .../xpack/watcher/WatcherRestTestCase.java | 8 ++-- .../watcher/WatcherYamlSuiteTestCase.java | 38 ++++++++----------- .../watcher/WatcherIndexingListener.java | 4 +- .../watcher/WatcherLifeCycleService.java | 3 +- .../smoketest/WatcherJiraYamlTestSuiteIT.java | 25 ++++++------ .../WatcherPagerDutyYamlTestSuiteIT.java | 25 ++++++------ .../WatcherSlackYamlTestSuiteIT.java | 25 ++++++------ 11 files changed, 63 insertions(+), 79 deletions(-) diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 1d6df60df0f88..6191f33f1c5dd 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -251,7 +251,7 @@ public void reenableWatcher() throws Exception { if (isWatcherTest()) { assertBusy(() -> { ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": @@ -261,7 +261,7 @@ public void reenableWatcher() throws Exception { emptyList(), emptyMap() ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); + boolean isAcknowledged = startResponse.evaluate("acknowledged"); assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); case "stopping": diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index b09250e1527f3..8a6dada181c6c 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -149,8 +149,9 @@ public boolean isError() { /** * Parses the response body and extracts a specific value from it (identified by the provided path) */ - public Object evaluate(String path) throws IOException { - return evaluate(path, Stash.EMPTY); + @SuppressWarnings("unchecked") + public <T> T evaluate(String path) throws IOException { + return (T) evaluate(path, Stash.EMPTY); } /** diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 5ac83f94f6248..a32679d445629 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -469,8 +469,7 @@ static String readOsFromNodesInfo(RestClient restClient) throws IOException { ClientYamlTestResponse restTestResponse = new
ClientYamlTestResponse(response); SortedSet osPrettyNames = new TreeSet<>(); - @SuppressWarnings("unchecked") - final Map nodes = (Map) restTestResponse.evaluate("nodes"); + final Map nodes = restTestResponse.evaluate("nodes"); for (Entry node : nodes.entrySet()) { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index 0f2bf2703f62f..b32a7385d12c5 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -71,7 +71,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx original.getApiCallSection().getNodeSelector() ); - String id = (String) startResponse.evaluate("id"); + String id = startResponse.evaluate("id"); boolean finishedEarly = id == null; if (finishedEarly) { /* diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java index 341e92641f641..19f1133e4f14f 100644 --- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java +++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java @@ -11,9 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; -import org.hamcrest.Matchers; import org.junit.After; -import org.junit.Assert; import org.junit.Before; import java.io.IOException; @@ -21,6 +19,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + /** * Parent test class for Watcher (not-YAML) based REST tests */ @@ -36,7 +36,7 @@ public final void startWatcher() throws Exception { case "stopped": Response startResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_start")); boolean isAcknowledged = ObjectPath.createFromResponse(startResponse).evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); case "stopping": throw new AssertionError("waiting until stopping state reached stopped state to start again"); @@ -68,7 +68,7 @@ public final void stopWatcher() throws Exception { case "started": Response stopResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_stop")); boolean isAcknowledged = ObjectPath.createFromResponse(stopResponse).evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until started state reached stopped state"); default: throw new AssertionError("unknown state[" + state + "]"); diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java 
b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java index c7b1e0d0fcbee..ddcf976c84572 100644 --- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java +++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java @@ -13,16 +13,15 @@ import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.hamcrest.Matchers; import org.junit.After; -import org.junit.Assert; import org.junit.Before; +import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.watcher.WatcherRestTestCase.deleteAllWatcherData; +import static org.hamcrest.Matchers.is; /** * Parent test class for Watcher YAML based REST tests @@ -40,25 +39,25 @@ public static Iterable parameters() throws Exception { @Before public final void startWatcher() throws Exception { ESTestCase.assertBusy(() -> { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped" -> { ClientYamlTestResponse startResponse = getAdminExecutionContext().callApi( "watcher.start", - emptyMap(), - emptyList(), - emptyMap() + Map.of(), + List.of(), + Map.of() ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + boolean isAcknowledged = startResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); } case "stopping" -> throw new AssertionError("waiting until stopping state reached stopped state to start again"); case "starting" -> throw new AssertionError("waiting until starting state reached started state"); case "started" -> { - int watcherCount = (int) response.evaluate("stats.0.watch_count"); + int watcherCount = response.evaluate("stats.0.watch_count"); if (watcherCount > 0) { logger.info("expected 0 active watches, but got [{}], deleting watcher indices again", watcherCount); deleteAllWatcherData(); @@ -73,8 +72,8 @@ public final void startWatcher() throws Exception { @After public final void stopWatcher() throws Exception { ESTestCase.assertBusy(() -> { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": // all good here, we are done @@ -84,14 +83,9 @@ public final void stopWatcher() throws Exception { case "starting": throw new AssertionError("waiting until starting state reached started state to stop"); case "started": - ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi( - "watcher.stop", - emptyMap(), 
- emptyList(), - emptyMap() - ); - boolean isAcknowledged = (boolean) stopResponse.evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + boolean isAcknowledged = stopResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until started state reached stopped state"); default: throw new AssertionError("unknown state[" + state + "]"); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 7e16a0353f2cd..e77c7aba6824d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -141,7 +141,7 @@ public void postIndex(ShardId shardId, Engine.Index operation, Engine.IndexResul logger.debug("adding watch [{}] to trigger service", watch.id()); triggerService.add(watch); } else { - logger.debug("removing watch [{}] to trigger service", watch.id()); + logger.debug("removing watch [{}] from trigger service", watch.id()); triggerService.remove(watch.id()); } } else { @@ -179,7 +179,7 @@ public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { if (isWatchDocument(shardId.getIndexName())) { - logger.debug("removing watch [{}] to trigger service via delete", delete.id()); + logger.debug("removing watch [{}] from trigger service via delete", delete.id()); triggerService.remove(delete.id()); } return delete; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index f6e34ccb243c8..cd0e066de2eaf 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -44,7 +44,7 @@ public class WatcherLifeCycleService implements ClusterStateListener { private final AtomicReference state = new AtomicReference<>(WatcherState.STARTED); private final AtomicReference> previousShardRoutings = new AtomicReference<>(Collections.emptyList()); private volatile boolean shutDown = false; // indicates that the node has been shutdown and we should never start watcher after this. - private volatile WatcherService watcherService; + private final WatcherService watcherService; private final EnumSet stopStates = EnumSet.of(WatcherState.STOPPED, WatcherState.STOPPING); WatcherLifeCycleService(ClusterService clusterService, WatcherService watcherService) { @@ -123,7 +123,6 @@ public void clusterChanged(ClusterChangedEvent event) { } else { logger.info("watcher has not been stopped. 
not currently in a stopping state, current state [{}]", state.get()); } - }); } } diff --git a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java index f8977f8d3cf8d..250920382719a 100644 --- a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java +++ b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); diff --git a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java index 10352b54912e5..a8e522f3836fb 100644 --- a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java +++ 
b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); diff --git a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java index 5c083cb90cd69..9cb64bab89d34 100644 --- a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java +++ b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests 
against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); From 53ec28d0c99130ed55693c2164d1d67b703fb563 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 8 Apr 2024 15:36:17 +0300 Subject: [PATCH 169/264] [TEST] Accept "failed" as status in total downsample latency metric (#107207) * Update 8.13 release notes with known issue * revert unintended * reword * reword * reword * Add rolling update test for downsampling * limit testing to 8.10+ * add retry logic for querying * remove unused variable * check all search results * minor refactor * spotless * Add full cluster restart test for downsampling * Accept "failed" as status in total latency metric. 
--- .../xpack/downsample/DownsampleActionSingleNodeTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 3c4be50b25a73..2057518307fc0 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -1186,7 +1186,7 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000 ); assertEquals(1, measurement.attributes().size()); - assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration"))); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration", "failed"))); } }, 10, TimeUnit.SECONDS); } From bdf9c605b5535835f25a8dcf0d79f4f090f8c322 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:53:30 +0100 Subject: [PATCH 170/264] ES|QL fix no-length substring with supplementary (4-byte) character (#107183) This commit fixes a bug in the no-length substring variant with supplementary (4-byte) characters. --- docs/changelog/107183.yaml | 5 +++++ .../src/main/resources/string.csv-spec | 9 ++++++++- .../function/scalar/string/Substring.java | 9 +++------ .../function/scalar/string/SubstringTests.java | 14 ++++++++++++++ 4 files changed, 30 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/107183.yaml diff --git a/docs/changelog/107183.yaml b/docs/changelog/107183.yaml new file mode 100644 index 0000000000000..226d036456858 --- /dev/null +++ b/docs/changelog/107183.yaml @@ -0,0 +1,5 @@ +pr: 107183 +summary: ES|QL fix no-length substring with supplementary (4-byte) character +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index f22e1b2de7f6a..aeb87ea5b66f4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -172,6 +172,13 @@ emp_no:integer | last_name:keyword | x:keyword | z:keyword 10010 | Piveteau | P | a ; +substring Emoji#[skip:-8.13.99,reason:bug fix in 8.14] +row a = "🐱Meow!🐶Woof!" | eval sub1 = substring(a, 2) | eval sub2 = substring(a, 2, 100); + +a:keyword | sub1:keyword | sub2:keyword +🐱Meow!🐶Woof! | Meow!🐶Woof! | Meow!🐶Woof! +; + ltrim from employees | sort emp_no | limit 10 | eval name = concat(" ", first_name, " ") | eval name = ltrim(name) | eval name = concat("'", name, "'") | keep emp_no, name; @@ -1236,7 +1243,7 @@ emp_no:integer | last_name:keyword | f_s:keyword | f_l:integer ; locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] -row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 3) | eval f_l = locate(a, f_s); +row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 2) | eval f_l = locate(a, f_s); a:keyword | f_s:keyword | f_l:integer 🐱Meow!🐶Woof! | Meow!🐶Woof! 
| 3 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 3bd7d660352c3..a1f2586f4faed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -86,12 +86,9 @@ public boolean foldable() { @Evaluator(extraName = "NoLength") static BytesRef process(BytesRef str, int start) { - if (str.length == 0) { - return null; - } - int codePointCount = UnicodeUtil.codePointCount(str); - int indexStart = indexStart(codePointCount, start); - return new BytesRef(str.utf8ToString().substring(indexStart)); + int length = str.length; // we just need a value at least the length of the string + return process(str, start, length); + } @Evaluator diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 4736ba2cc74d7..648fffada6dc1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.function.Supplier; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -161,6 +162,19 @@ public void testUnicode() { assert s.length() == 8 && s.codePointCount(0, s.length()) == 7; assertThat(process(s, 3, 1000), equalTo("tiger")); assertThat(process(s, -6, 1000), equalTo("\ud83c\udf09tiger")); + assert "🐱".length() == 2 && "🐶".length() == 2; + assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; + assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; + + for (Integer len : new Integer[] { null, 100, 100000 }) { + assertThat(process(s, 3, len), equalTo("tiger")); + assertThat(process(s, -6, len), equalTo("\ud83c\udf09tiger")); + + assertThat(process("🐱Meow!🐶Woof!", 0, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 1, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 2, len), equalTo("Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 3, len), equalTo("eow!🐶Woof!")); + } } public void testNegativeLength() { From 9edd67f911c3c9cdd16eecec88ab6d51fdda7fdf Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:56:23 +0100 Subject: [PATCH 171/264] Fix ES|QL locate with supplementary (4-byte) character (#107172) This commit fixes ES|QL's `locate` function for inputs containing supplementary (4-byte) characters.
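For context, a minimal standalone sketch (not part of the patch; the class name is made up and only plain JDK string APIs are used) of the mismatch this commit addresses: `String.indexOf` returns a UTF-16 char offset, which over-counts by one for every preceding supplementary character, so the fix converts that offset into a code-point index with `codePointCount` before producing LOCATE's 1-based result.

```java
public class LocateOffsets {
    public static void main(String[] args) {
        String str = "🐱Meow!🐶Woof!";  // "🐱" is one code point but two UTF-16 chars (four UTF-8 bytes)
        String substr = "Meow!🐶Woof!";
        int idx = str.indexOf(substr);                       // 2 -- a UTF-16 char offset
        System.out.println(1 + idx);                         // 3 -- the old, incorrect 1-based answer
        System.out.println(1 + str.codePointCount(0, idx));  // 2 -- code-point based, the fixed answer
    }
}
```

This is exactly the correction visible in the csv-spec change below, where the expected `locate` result for `🐱Meow!🐶Woof!` drops from 3 to 2.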
--- .../src/main/resources/string.csv-spec | 2 +- .../function/scalar/string/Locate.java | 6 ++- .../function/scalar/string/LocateTests.java | 53 +++++++++++++++++++ 3 files changed, 59 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index aeb87ea5b66f4..69638ef459805 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1246,7 +1246,7 @@ locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 2) | eval f_l = locate(a, f_s); a:keyword | f_s:keyword | f_l:integer -🐱Meow!🐶Woof! | Meow!🐶Woof! | 3 +🐱Meow!🐶Woof! | Meow!🐶Woof! | 2 ; locateNestedSubstring#[skip:-8.13.99,reason:new string function added in 8.14] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index a1157fad6c46f..c8b546718aabf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -96,7 +96,11 @@ static int process(BytesRef str, BytesRef substr, int start) { int codePointCount = UnicodeUtil.codePointCount(str); int indexStart = indexStart(codePointCount, start); String utf8ToString = str.utf8ToString(); - return 1 + utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + int idx = utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + if (idx == -1) { + return 0; + } + return 1 + utf8ToString.codePointCount(0, idx); } @Evaluator(extraName = "NoStart") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index c1d3df53ece60..b95f05039630a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.function.Supplier; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; @@ -131,6 +132,58 @@ public void testExactString() { assertThat(process("界世", "界世", 0), equalTo(1)); } + public void testSupplementaryCharacter() { + // some assertions about the supplementary (4-byte) character we'll use for testing + assert "𠜎".length() == 2; + assert "𠜎".codePointCount(0, 2) == 1; + assert "𠜎".getBytes(UTF_8).length == 4; + + assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); + assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); + assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); + assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); + assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); + + assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); + assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); + assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); + 
assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7)); + assertThat(process("𠜎a ti𠜎er", "r", 0), equalTo(8)); + + // exact + assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1)); + assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1)); + + // prefix + assertThat(process("𠜎abc", "𠜎", 0), equalTo(1)); + assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1)); + assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1)); + assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1)); + assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1)); + + // suffix + assertThat(process("abc𠜎", "𠜎", 0), equalTo(4)); + assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4)); + assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4)); + assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4)); + assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4)); + + // out of range + assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0)); + assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0)); + assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0)); + + assert "🐱".length() == 2 && "🐶".length() == 2; + assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; + assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; + assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1)); + assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2)); + assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3)); + } + private Integer process(String str, String substr, Integer start) { try ( EvalOperator.ExpressionEvaluator eval = evaluator( From 3f998469e22ff51d641e25879b87ec4ec9b39560 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 8 Apr 2024 09:29:07 -0400 Subject: [PATCH 172/264] Document ESQL's csv-spec files (#107169) --- .../testFixtures/src/main/resources/README.md | 178 ++++++++++++++++++ 1 file changed, 178 insertions(+) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md new file mode 100644 index 0000000000000..fdd52c6aac229 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md @@ -0,0 +1,178 @@ +# ESQL's CSV-SPEC Integration Tests + +ESQL has lots of different kinds of integration tests! Like the rest of +Elasticsearch it has YAML tests and Java Rest tests and ESIntegTestCase +subclasses, but it *also* has CSV-SPEC tests. You can think of them like +the YAML tests, but they can *only* call _query and assert on the response. +That simplicity lets us run them in lots of contexts and keeps them *fast*. +As such, most of ESQL's integration tests are CSV-SPEC tests. + +## Running + +CSV-SPEC tests run in lots of different ways. The simplest way to run a +CSV-SPEC test is to open ESQL's CsvTests.java and run it right in IntelliJ using +the unit runner. As of this writing that runs 1,350 tests in about 35 seconds. +It's fast because it doesn't stand up an Elasticsearch node at all. It runs +like a big unit test. + +The second-simplest way to run the CSV-SPEC tests is to run `EsqlSpecIT` in +`:x-pack:plugin:esql:qa:server:single-node` via the Gradle runner in IntelliJ +or on the command line. That will boot a real Elasticsearch node, create some +test data, and run the tests.
The tests are reused in a few more scenarios, +including multi-node and mixed-cluster. + +## Organization + +The CSV-SPEC tests grew organically for a long time, but we've since grown +general organizing principles. But lots of tests don't follow those principles. +See organic growth. Anyway! + +### Files named after types + +Basic support for a type, like, say, `integer` or `geo_point` will live in a +file named after the type. + +* `boolean` +* `date` +* `floats` (`double`) +* `ints` (`integer` and `long`) +* `ip` +* `null` +* `unsigned_long` +* `version` + +Many functions can take lots of different types as input. Like `TO_STRING` +and `VALUES`. Those tests also live in these files. + +### Themed functions + +Some files are named after groups of functions and contain, unsurprisingly, +the tests for those functions: + +* `comparison` +* `conditional` +* `math` + +### Files named after operations + +Lots of commands have files named after operations in the ESQL language and +contain the integration testing of the syntax and options in that operation. +Operations will appear in many of the other files, especially `FROM`, `WHERE`, +`LIMIT`, and `EVAL`, but only to test particular functions. + +* `dissect` +* `drop` +* `enrich` +* `eval` +* `grok` +* `order` +* `keep` +* `limit` +* `meta` +* `mv_expand` +* `rename` +* `row` +* `stats` +* `topN` +* `where` +* `where-like` + +### Deprecated files + +When we first implemented copying snippets into the documentation I dumped all +the snippets into `docs.csv-spec`. This was supposed to be a temporary holding +area until they were relocated, and we haven't had time to do that. Don't put +more tests in there. + +## Embedding examples in the documentation + +Snippets from these tests can be embedded into the asciidoc documentation of +ESQL using the following rather arcane snippet: + +```asciidoc +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sin-result] +|=== +```
+<details> +<summary>What is this asciidoc syntax?</summary> + +The first section is a source code block for the ES|QL query: + +- a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`) + - `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL +- an [include directive](https://docs.asciidoctor.org/asciidoc/latest/directives/include/) to import content from another file (i.e. test files here) into the current document +- a directory path defined as an [attribute](https://docs.asciidoctor.org/asciidoc/latest/attributes/document-attributes/) or variable, within curly braces: `{esql-specs}` +- a [tagged region](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions) `[tag=sin]` to only include a specific section of file + +The second section is the response returned as a table: + +- styled using `[%header.monospaced.styled,format=dsv,separator=|]` +- delimited by `|===` +- again using includes, attributes, and tagged regions +
+</details> + +The example above extracts the `sin` test from the `floats` file. If you are +writing the tests for a function don't build this by hand, instead annotate +the `.java` file for the function with `@FunctionInfo` and add an `examples` +field like this: + +```java +@FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.", + examples = @Example(file = "floats", tag = "sin") +) +``` + +Running the tests will generate the asciidoc files for you. See +`esql/functions/README.md` for all of the docs the tests generate. + +Either way, CSV-SPEC files must be tagged using four special comments so snippets can be +included in the docs: + +```csv-spec +sin +// tag::sin[] +ROW a=1.8 +| EVAL sin=SIN(a) +// end::sin[] +; + +// tag::sin-result[] +a:double | sin:double + 1.8 | 0.9738476308781951 +// end::sin-result[] +; +``` + +The `// tag::` and `// end::` are standard asciidoc syntax for working with [tagged regions](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions). Weird looking but +you aren't going to type it by accident! + +Finally, this'll appear in the docs as a table kind of like this: + +| a:double | sin:double | +|---------:|-------------------:| +| 1.8 | 0.9738476308781951 | + +### Skipping tests in old versions + +CSV-SPEC tests run against half-upgraded clusters in the +`x-pack:plugin:esql:qa:server:mixed-cluster` project and will fail if they test +new behavior against an old node. To stop them from running you should create +a `NodeFeature` in `EsqlFeatures` for your change. Then you can skip it by +adding a `required_feature` to your test like so: +```csv-spec +mvSlice +required_feature: esql.mv_sort + +row a = [true, false, false, true] +| eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); +``` + +That skips nodes that don't have the `esql.mv_sort` feature. From 43efc9505700217e258ad1df3b7f1e71213ccc1a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 8 Apr 2024 15:30:10 +0200 Subject: [PATCH 173/264] Refactoring on merging InternalTerms (#107049) This refactor introduces a TermsAggregationReducer that holds the logic to merge InternalTerms. The main difference is that we now accumulate the buckets instead of the internal aggregations.
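Before the diff, a rough standalone sketch of the k-way merge that `reduceMergeSort` below performs (the `Bucket` and `Cursor` types here are hypothetical stand-ins, and `java.util.PriorityQueue` replaces Lucene's; the real code additionally tracks doc-count errors and emits `DelayedBucket`s to a sink):

```java
import java.util.*;

public class KWayMergeSketch {
    record Bucket(String key, long docCount) {}

    // One cursor per key-sorted input list; 'current' is the next unconsumed bucket.
    static final class Cursor {
        final Iterator<Bucket> it;
        Bucket current;
        Cursor(Iterator<Bucket> it) { this.it = it; this.current = it.next(); }
        boolean advance() { if (it.hasNext()) { current = it.next(); return true; } return false; }
    }

    static List<Bucket> merge(List<List<Bucket>> lists) {
        PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparing((Cursor c) -> c.current.key()));
        for (List<Bucket> list : lists) {
            if (list.isEmpty() == false) {
                pq.add(new Cursor(list.iterator()));
            }
        }
        List<Bucket> merged = new ArrayList<>();
        List<Bucket> sameKey = new ArrayList<>(); // run of buckets sharing the smallest key
        while (pq.isEmpty() == false) {
            Cursor top = pq.poll();
            if (sameKey.isEmpty() == false && sameKey.get(0).key().equals(top.current.key()) == false) {
                merged.add(reduce(sameKey)); // the previous key's run is complete
                sameKey.clear();
            }
            sameKey.add(top.current);
            if (top.advance()) {
                pq.add(top); // re-queue the cursor under its new current key
            }
        }
        if (sameKey.isEmpty() == false) {
            merged.add(reduce(sameKey));
        }
        return merged;
    }

    // Stand-in for reduceBucket: combine same-key buckets coming from different shards.
    static Bucket reduce(List<Bucket> run) {
        return new Bucket(run.get(0).key(), run.stream().mapToLong(Bucket::docCount).sum());
    }

    public static void main(String[] args) {
        System.out.println(merge(List.of(
            List.of(new Bucket("a", 1), new Bucket("c", 2)),
            List.of(new Bucket("a", 3), new Bucket("b", 4))
        ))); // [Bucket[key=a, docCount=4], Bucket[key=b, docCount=4], Bucket[key=c, docCount=2]]
    }
}
```

Because every input list is already sorted by key, each bucket is visited once and same-key runs can be reduced as they complete, instead of hashing every bucket the way `reduceLegacy` must.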
--- .../bucket/terms/AbstractInternalTerms.java | 191 +++++++++--------- .../bucket/terms/DoubleTerms.java | 16 +- .../aggregations/bucket/terms/LongTerms.java | 15 +- .../bucket/terms/StringTerms.java | 16 +- .../multiterms/InternalMultiTerms.java | 113 ++++++----- 5 files changed, 181 insertions(+), 170 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index c423b2ca8cb51..11bd63bcdaa8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.DelayedBucket; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -112,23 +113,6 @@ private B reduceBucket(List buckets, AggregationReduceContext context) { return createBucket(docCount, aggs, docCountError, buckets.get(0)); } - private BucketOrder getReduceOrder(List aggregations) { - BucketOrder thisReduceOrder = null; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().size() == 0) { - continue; - } - if (thisReduceOrder == null) { - thisReduceOrder = terms.getReduceOrder(); - } else if (thisReduceOrder.equals(terms.getReduceOrder()) == false) { - return getOrder(); - } - } - return thisReduceOrder != null ? thisReduceOrder : getOrder(); - } - private long getDocCountError(A terms) { int size = terms.getBuckets().size(); if (size == 0 || size < terms.getShardSize() || isKeyOrder(terms.getOrder())) { @@ -154,47 +138,37 @@ private long getDocCountError(A terms) { * @return the order we used to reduce the buckets */ private BucketOrder reduceBuckets( - List aggregations, + List> bucketsList, + BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { - /* - * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. - * That allows to perform a merge sort when reducing multiple aggregations together. - * For backward compatibility, we disable the merge sort and use ({@link #reduceLegacy} if any of - * the provided aggregations use a different {@link #reduceOrder}. - */ - BucketOrder thisReduceOrder = getReduceOrder(aggregations); if (isKeyOrder(thisReduceOrder)) { // extract the primary sort in case this is a compound order. 
thisReduceOrder = InternalOrder.key(isKeyAsc(thisReduceOrder)); - reduceMergeSort(aggregations, thisReduceOrder, reduceContext, sink); + reduceMergeSort(bucketsList, thisReduceOrder, reduceContext, sink); } else { - reduceLegacy(aggregations, reduceContext, sink); + reduceLegacy(bucketsList, reduceContext, sink); } return thisReduceOrder; } private void reduceMergeSort( - List aggregations, + List> bucketsList, BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { assert isKeyOrder(thisReduceOrder); final Comparator cmp = thisReduceOrder.comparator(); - final PriorityQueue> pq = new PriorityQueue<>(aggregations.size()) { + final PriorityQueue> pq = new PriorityQueue<>(bucketsList.size()) { @Override protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { return cmp.compare(a.current(), b.current()) < 0; } }; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - pq.add(new IteratorAndCurrent<>(terms.getBuckets().iterator())); - } + for (List buckets : bucketsList) { + pq.add(new IteratorAndCurrent<>(buckets.iterator())); } // list of buckets coming from different shards that have the same key List sameTermBuckets = new ArrayList<>(); @@ -228,19 +202,11 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } } - private void reduceLegacy( - List aggregations, - AggregationReduceContext reduceContext, - Consumer> sink - ) { - Map> bucketMap = new HashMap<>(); - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - for (B bucket : terms.getBuckets()) { - bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); - } + private void reduceLegacy(List> bucketsList, AggregationReduceContext reduceContext, Consumer> sink) { + final Map> bucketMap = new HashMap<>(); + for (List buckets : bucketsList) { + for (B bucket : buckets) { + bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); } } for (List sameTermBuckets : bucketMap.values()) { @@ -248,21 +214,49 @@ private void reduceLegacy( } } - public InternalAggregation doReduce(List aggregations, AggregationReduceContext reduceContext) { - long sumDocCountError = 0; - long[] otherDocCount = new long[] { 0 }; - A referenceTerms = null; - for (InternalAggregation aggregation : aggregations) { + public final AggregatorReducer termsAggregationReducer(AggregationReduceContext reduceContext, int size) { + return new TermsAggregationReducer(reduceContext, size); + } + + private class TermsAggregationReducer implements AggregatorReducer { + private final List> bucketsList; + private final AggregationReduceContext reduceContext; + + private long sumDocCountError = 0; + private final long[] otherDocCount = new long[] { 0 }; + private A referenceTerms = null; + /* + * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. + * That allows to perform a merge sort when reducing multiple aggregations together. + * For backward compatibility, we disable the merge sort and use ({@link #reduceLegacy} if any of + * the provided aggregations use a different {@link #reduceOrder}. 
+ */ + private BucketOrder thisReduceOrder = null; + + private TermsAggregationReducer(AggregationReduceContext reduceContext, int size) { + bucketsList = new ArrayList<>(size); + this.reduceContext = reduceContext; + } + + @Override + public void accept(InternalAggregation aggregation) { + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") A terms = (A) aggregation; - if (referenceTerms == null && terms.canLeadReduction()) { + if (referenceTerms == null) { referenceTerms = terms; - } - if (referenceTerms != null && referenceTerms.getClass().equals(terms.getClass()) == false && terms.canLeadReduction()) { + } else if (referenceTerms.getClass().equals(terms.getClass()) == false) { // control gets into this loop when the same field name against which the query is executed // is of different types in different indices. throw AggregationErrors.reduceTypeMismatch(referenceTerms.getName(), Optional.empty()); } + if (thisReduceOrder == null) { + thisReduceOrder = terms.getReduceOrder(); + } else if (thisReduceOrder != getOrder() && thisReduceOrder.equals(terms.getReduceOrder()) == false) { + thisReduceOrder = getOrder(); + } otherDocCount[0] += terms.getSumOfOtherDocCounts(); final long thisAggDocCountError = getDocCountError(terms); if (sumDocCountError != -1) { @@ -283,52 +277,63 @@ public InternalAggregation doReduce(List aggregations, Aggr // later in this method. bucket.updateDocCountError(-thisAggDocCountError); } + if (terms.getBuckets().isEmpty() == false) { + bucketsList.add(terms.getBuckets()); + } } - BucketOrder thisReduceOrder; - List result; - if (reduceContext.isFinalReduce()) { - TopBucketBuilder top = TopBucketBuilder.build( - getRequiredSize(), - getOrder(), - removed -> otherDocCount[0] += removed.getDocCount() - ); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (bucket.getDocCount() >= getMinDocCount()) { - top.add(bucket); - } - }); - result = top.build(); - } else { - /* - * We can prune the list on partial reduce if the aggregation is ordered - * by key and not filtered on doc count. The results come in key order - * so we can just stop iteration early. - */ - boolean canPrune = isKeyOrder(getOrder()) && getMinDocCount() == 0; - result = new ArrayList<>(); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (canPrune == false || result.size() < getRequiredSize()) { - result.add(bucket.reduced()); + @Override + public InternalAggregation get() { + BucketOrder thisReduceOrder; + List result; + if (isKeyOrder(getOrder()) && getMinDocCount() <= 1) { + /* + * the aggregation is order by key and not filtered on doc count. The results come in key order + * so we can just have an optimize collection. 
+ */ + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (result.size() < getRequiredSize()) { + result.add(bucket.reduced()); + } else { + otherDocCount[0] += bucket.getDocCount(); + } + }); + } else if (reduceContext.isFinalReduce()) { + TopBucketBuilder top = TopBucketBuilder.build( + getRequiredSize(), + getOrder(), + removed -> otherDocCount[0] += removed.getDocCount() + ); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (bucket.getDocCount() >= getMinDocCount()) { + top.add(bucket); + } + }); + result = top.build(); + } else { + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> result.add(bucket.reduced())); + } + for (B r : result) { + if (sumDocCountError == -1) { + r.setDocCountError(-1); } else { - otherDocCount[0] += bucket.getDocCount(); + r.updateDocCountError(sumDocCountError); } - }); - } - for (B r : result) { + } + long docCountError; if (sumDocCountError == -1) { - r.setDocCountError(-1); + docCountError = -1; } else { - r.updateDocCountError(sumDocCountError); + docCountError = bucketsList.size() == 1 ? 0 : sumDocCountError; } + return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } - long docCountError; - if (sumDocCountError == -1) { - docCountError = -1; - } else { - docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + + private BucketOrder getThisReduceOrder() { + return thisReduceOrder == null ? getOrder() : thisReduceOrder; } - return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java index 6710dd51a3dd7..2e40ab35b21c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -18,7 +19,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -190,21 +190,25 @@ protected DoubleTerms create(String name, List buckets, BucketOrder redu @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final List aggregations = new ArrayList<>(); + private final AggregatorReducer processor = termsAggregationReducer(reduceContext, size); @Override public void accept(InternalAggregation aggregation) { if (aggregation instanceof LongTerms longTerms) { - DoubleTerms dTerms = LongTerms.convertLongTermsToDouble(longTerms, format); - aggregations.add(dTerms); + processor.accept(LongTerms.convertLongTermsToDouble(longTerms, format)); } else { - aggregations.add(aggregation); + processor.accept(aggregation); } } @Override public 
InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + return processor.get(); + } + + @Override + public void close() { + Releasables.close(processor); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index b0af2c3d4e618..76f33b1c0e726 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -213,8 +213,8 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont } return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); - boolean isPromotedToDouble = false; + private List aggregations = new ArrayList<>(size); + private boolean isPromotedToDouble = false; @Override public void accept(InternalAggregation aggregation) { @@ -243,7 +243,16 @@ private void promoteToDouble(List aggregations) { @Override public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index 44bbf62c7cb19..aa3788f241079 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -14,12 +14,10 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -153,19 +151,7 @@ public StringTerms(StreamInput in) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { - return new AggregatorReducer() { - private final List aggregations = new ArrayList<>(size); - - @Override - public void accept(InternalAggregation aggregation) { - aggregations.add(aggregation); - } - - @Override - public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); - } - }; + return termsAggregationReducer(reduceContext, size); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 3e321d57d877c..0f732d2017c74 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -439,44 +439,6 @@ public InternalMultiTerms create(List buckets) { ); } - /** - * Checks if any keys need to be promoted to double from long or unsigned_long - */ - private boolean[] needsPromotionToDouble(List aggregations) { - if (aggregations.size() < 2) { - return null; - } - boolean[] promotions = null; - - for (int i = 0; i < keyConverters.size(); i++) { - boolean hasLong = false; - boolean hasUnsignedLong = false; - boolean hasDouble = false; - boolean hasNonNumber = false; - for (InternalAggregation aggregation : aggregations) { - InternalMultiTerms agg = (InternalMultiTerms) aggregation; - KeyConverter keyConverter = agg.keyConverters.get(i); - switch (keyConverter) { - case DOUBLE -> hasDouble = true; - case LONG -> hasLong = true; - case UNSIGNED_LONG -> hasUnsignedLong = true; - default -> hasNonNumber = true; - } - } - if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { - throw AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); - } - // Promotion to double is required if at least 2 of these 3 conditions are true. - if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ? 1 : 0) > 1) { - if (promotions == null) { - promotions = new boolean[keyConverters.size()]; - } - promotions[i] = true; - } - } - return promotions; - } - private InternalAggregation promoteToDouble(InternalAggregation aggregation, boolean[] needsPromotion) { InternalMultiTerms multiTerms = (InternalMultiTerms) aggregation; List multiTermsBuckets = multiTerms.getBuckets(); @@ -539,33 +501,78 @@ private InternalAggregation promoteToDouble(InternalAggregation aggregation, boo ); } - public List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { - if (needsPromotionToDouble != null) { - List newAggs = new ArrayList<>(aggregations.size()); - for (InternalAggregation agg : aggregations) { - newAggs.add(promoteToDouble(agg, needsPromotionToDouble)); - } - return newAggs; - } else { - return aggregations; - } - } - @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); + private List aggregations = new ArrayList<>(size); @Override public void accept(InternalAggregation aggregation) { aggregations.add(aggregation); } + private List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + return aggregations; + } + + /** + * Checks if any keys need to be promoted to double from long or unsigned_long + */ + private boolean[] needsPromotionToDouble(List aggregations) { + if (aggregations.size() < 2) { + return null; + } + boolean[] promotions = null; + + for (int i = 0; i < keyConverters.size(); i++) { + boolean hasLong = false; + boolean hasUnsignedLong = false; + boolean hasDouble = false; + boolean hasNonNumber = false; + for (InternalAggregation aggregation : aggregations) { + InternalMultiTerms agg = (InternalMultiTerms) aggregation; + KeyConverter keyConverter = agg.keyConverters.get(i); + switch (keyConverter) { + case DOUBLE -> hasDouble = true; + case LONG -> hasLong = true; + case UNSIGNED_LONG -> hasUnsignedLong = true; + default -> hasNonNumber = true; + } + } + if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { + throw 
AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); + } + // Promotion to double is required if at least 2 of these 3 conditions are true. + if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ? 1 : 0) > 1) { + if (promotions == null) { + promotions = new boolean[keyConverters.size()]; + } + promotions[i] = true; + } + } + return promotions; + } + @Override public InternalAggregation get() { - List processed = getProcessedAggs(aggregations, needsPromotionToDouble(aggregations)); - return ((AbstractInternalTerms) processed.get(0)).doReduce(processed, reduceContext); + final boolean[] needsPromotionToDouble = needsPromotionToDouble(aggregations); + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } From ba983cf22c5970a940d2bf988e6052a121d22af9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 8 Apr 2024 15:49:25 +0100 Subject: [PATCH 174/264] Fix `TransportTasksActionTests#testFailedTasksCount` (#107190) Prior to #106733 when the `TestNodesAction` threw an exception it would immediately unregister the task: https://github.com/elastic/elasticsearch/blob/d39d1e2c249f49b8170d4f50329934d871b2b382/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java#L78 However with that change the exception is caught and passed to the `TaskTransportChannel`, so unregistration happens after sending the response and may therefore not be recorded by the time the test makes its assertion. This commit fixes the test with a busy-wait. Closes #107043 --- .../node/tasks/TransportTasksActionTests.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 9ddcf8a596226..67cba13661e34 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -563,7 +563,6 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { responseLatch.await(10, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107043") public void testFailedTasksCount() throws Exception { Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build(); setupTestNodes(settings); @@ -605,14 +604,14 @@ protected NodeResponse nodeOperation(NodeRequest request, Task task) { // Make sure that actions are still registered in the task manager on all nodes // Twice on the coordinating node and once on all other nodes. 
- assertEquals(4, listeners[0].getEvents().size()); - assertEquals(2, listeners[0].getRegistrationEvents().size()); - assertEquals(2, listeners[0].getUnregistrationEvents().size()); - for (int i = 1; i < listeners.length; i++) { - assertEquals(2, listeners[i].getEvents().size()); - assertEquals(1, listeners[i].getRegistrationEvents().size()); - assertEquals(1, listeners[i].getUnregistrationEvents().size()); - } + assertBusy(() -> { + assertEquals(2, listeners[0].getRegistrationEvents().size()); + assertEquals(2, listeners[0].getUnregistrationEvents().size()); + for (int i = 1; i < listeners.length; i++) { + assertEquals(1, listeners[i].getRegistrationEvents().size()); + assertEquals(1, listeners[i].getUnregistrationEvents().size()); + } + }); } private List getAllTaskDescriptions() { From 9d62deb709b1f4932dd92ae6999902af939daeb1 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 8 Apr 2024 17:15:10 +0200 Subject: [PATCH 175/264] ES|QL: Make more tests deterministic (#107217) Fixes https://github.com/elastic/elasticsearch/issues/105540 Fixes https://github.com/elastic/elasticsearch/issues/103866 Making a few more ES|QL CSV tests deterministic, especially those that involve `AVG()` (mostly failures like `expected:<27517.27973714994[7]> but was:<27517.27973714994[4]>` due to double precision in distributed execution) --- .../esql/qa/testFixtures/src/main/resources/keep.csv-spec | 6 +++--- .../esql/qa/testFixtures/src/main/resources/stats.csv-spec | 7 ++++--- .../qa/testFixtures/src/main/resources/string.csv-spec | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec index facf06eb6a960..14a3807b8729c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec @@ -280,10 +280,10 @@ avg_salary:double | x:double ; averageOfEvalValue -from employees | eval ratio = salary / height | stats avg(ratio); +from employees | eval ratio = salary / height | stats avg = avg(ratio) | eval avg = round(avg, 8); -avg(ratio):double -27517.279737149947 +avg:double +27517.27973715 ; simpleWhere diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 6ccaf1eb0b6e7..fb2d46baf27ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -137,6 +137,7 @@ avgOfDouble FROM employees | STATS AVG(height) // end::avg[] +| EVAL `AVG(height)` = ROUND(`AVG(height)`, 5) ; // tag::avg-result[] @@ -159,7 +160,7 @@ h:double 1.76818359375 ; avgOfScaledFloat -from employees | stats h = avg(height.scaled_float); +from employees | stats h = avg(height.scaled_float) | eval h = round(h, 4); h:double 1.7682 @@ -1025,13 +1026,13 @@ c:long | cd:long docsStatsAvgNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] // tag::docsStatsAvgNestedExpression[] FROM employees -| STATS avg_salary_change = AVG(MV_AVG(salary_change)) +| STATS avg_salary_change = ROUND(AVG(MV_AVG(salary_change)), 10) // end::docsStatsAvgNestedExpression[] ; // tag::docsStatsAvgNestedExpression-result[] avg_salary_change:double -1.3904535864978902 +1.3904535865 // end::docsStatsAvgNestedExpression-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 69638ef459805..5a81a05cee143 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -799,7 +799,7 @@ emp_no:integer | full_name:keyword | full_name_2:keyword | job_positions:keyword ; showTextFields -from hosts | where host == "beta" | keep host, host_group, description; +from hosts | sort description, card, ip0, ip1 | where host == "beta" | keep host, host_group, description; ignoreOrder:true host:keyword | host_group:text | description:text From ef7e50c97df05d9e013c388bc1dc88c5d5b3cf28 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 8 Apr 2024 17:23:49 +0200 Subject: [PATCH 176/264] Fix native initialization for ESTestCase (#107203) This tweaks the IDEA-specific setup for multi-Java-version projects so that version-specific source sets are ignored only while IntelliJ is importing and syncing the project. They stay out of the IDEA model by default, but they are still picked up when (Gradle) test execution is triggered from the IDE (where idea.active = true). --- .../java/org/elasticsearch/gradle/internal/MrjarPlugin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index c64bd3cc9c068..16c286bfdd3f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -62,13 +62,13 @@ public class MrjarPlugin implements Plugin { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - var isIdea = System.getProperty("idea.active", "false").equals("true"); + var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true"); var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this.
// Avoids an IntelliJ bug: // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea - if (isIdea == false || ideaSourceSetsEnabled) { + if (isIdeaSync == false || ideaSourceSetsEnabled) { List mainVersions = findSourceVersions(project); List mainSourceSets = new ArrayList<>(); mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); From 00aee781a2dfcf5f8fdbcb1419ed4747b88cbf36 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 8 Apr 2024 11:45:22 -0400 Subject: [PATCH 177/264] Forward port release notes for v8.13.1 (#107002) --- .../reference/migration/migrate_8_13.asciidoc | 17 ++++--- docs/reference/release-notes.asciidoc | 2 + docs/reference/release-notes/8.13.1.asciidoc | 33 +++++++++++++ .../release-notes/highlights.asciidoc | 48 +++++-------------- 4 files changed, 56 insertions(+), 44 deletions(-) create mode 100644 docs/reference/release-notes/8.13.1.asciidoc diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c9e726d940b1d..dca10671e57bc 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,14 +16,17 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -The following changes in {es} 8.13 might affect your applications -and prevent them from operating normally. -Before upgrading to 8.13, review these changes and take the described steps -to mitigate the impact. +There are no breaking changes in 8.13. - -There are no notable breaking changes in {es} 8.13. -But there are some less critical breaking changes. +[discrete] +[[migrate-notable-changes-8.13]] +=== Notable changes +The following are notable, non-breaking updates to be aware of: + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs. [discrete] [[breaking_813_index_setting_changes]] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index e3c8da281f2a1..f9da92aef925e 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -63,6 +64,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] include::release-notes/8.12.1.asciidoc[] diff --git a/docs/reference/release-notes/8.13.1.asciidoc b/docs/reference/release-notes/8.13.1.asciidoc new file mode 100644 index 0000000000000..9f5f34d27eb79 --- /dev/null +++ b/docs/reference/release-notes/8.13.1.asciidoc @@ -0,0 +1,33 @@ +[[release-notes-8.13.1]] +== {es} version 8.13.1 + +Also see <>. 
+ +[[bug-8.13.1]] +[float] +=== Bug fixes + +Aggregations:: +* Add test to exercise reduction of terms aggregation order by key {es-pull}106799[#106799] + +Downsampling:: +* Gate reading of optional string array for bwc {es-pull}106878[#106878] + +Machine Learning:: +* Fix Array out of bounds exception in the XLM Roberta tokenizer {es-pull}106655[#106655] + +Search:: +* Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` {es-pull}106678[#106678] (issue: {es-issue}105911[#105911]) +* Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set {es-pull}106564[#106564] + +Transform:: +* Fail checkpoint on missing clusters {es-pull}106793[#106793] (issues: {es-issue}104533[#104533], {es-issue}106790[#106790]) + +[[enhancement-8.13.1]] +[float] +=== Enhancements + +Transform:: +* Raise loglevel of events related to transform lifecycle from DEBUG to INFO {es-pull}106602[#106602] + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 25096779521e4..8d9d743a239f5 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -31,46 +31,20 @@ endif::[] // tag::notable-highlights[] [discrete] -[[improve_storage_efficiency_for_non_metric_fields_in_tsdb]] -=== Improve storage efficiency for non-metric fields in TSDB -Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. -While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, -there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). -In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. -This means that for each time series, we are storing long consecutive runs of the same ordinal. -With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), -and runs of cycling values (such as `1 2 1 2 …`). -In our testing, we have seen a reduction in storage size by about 13%. -The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. -The more non-metric fields, the more effective this improvement will be. +[[add_global_retention_in_data_stream_lifecycle]] +=== Add global retention in data stream lifecycle +Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention +allows us to configure two different retentions: -{es-pull}99747[#99747] +- `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention +defined on the data stream level. +- `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream +data to be deleted after the `max_retention` has passed. -[discrete] -[[ga_release_of_synonyms_api]] -=== GA Release of Synonyms API -Removes the beta label for the Synonyms API to make it GA. - -{es-pull}103223[#103223] - -[discrete] -[[flag_in_field_caps_to_return_only_fields_with_values_in_index]] -=== Flag in `_field_caps` to return only fields with values in index -We added support for filtering the field capabilities API output by removing -fields that don't have a value. 
This can be done through the newly added -`include_empty_fields` parameter, which defaults to true. - -{es-pull}103651[#103651] - -[discrete] -[[new_lucene_9_10_release]] -=== New Lucene 9.10 release -- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. -- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search -- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. -- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. +Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data +stream considering all the available retention configurations. -{es-pull}105578[#105578] +{es-pull}105682[#105682] // end::notable-highlights[] From ab52ef1f06af1b08f5808ad165f062dc0dbdff7e Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Mon, 8 Apr 2024 17:55:41 +0200 Subject: [PATCH 178/264] Fix merging component templates with a mix of dotted and nested object mapper definitions (#106077) Co-authored-by: Andrei Dan --- docs/changelog/106077.yaml | 7 ++ .../RankFeatureMetaFieldMapperTests.java | 6 +- .../PercolatorFieldMapperTests.java | 13 +-- .../metadata/MetadataMappingService.java | 16 +--- .../index/mapper/DocumentParserContext.java | 10 ++- .../index/mapper/MapperBuilderContext.java | 25 +++++- .../index/mapper/MapperMergeContext.java | 13 ++- .../index/mapper/MapperService.java | 21 ++--- .../elasticsearch/index/mapper/Mapping.java | 8 +- .../index/mapper/MappingParser.java | 12 ++- .../index/mapper/NestedObjectMapper.java | 20 ++--- .../index/mapper/ObjectMapper.java | 41 +++------ .../index/mapper/PassThroughObjectMapper.java | 10 ++- .../index/mapper/RootObjectMapper.java | 10 +-- .../index/mapper/DocumentMapperTests.java | 6 +- .../mapper/MapperBuilderContextTests.java | 30 +++++++ .../index/mapper/MapperMergeContextTests.java | 6 ++ .../index/mapper/MapperServiceTests.java | 87 +++++++++++++++++++ .../index/mapper/NestedObjectMapperTests.java | 3 +- .../index/mapper/ObjectMapperMergeTests.java | 16 +--- .../index/mapper/ObjectMapperTests.java | 23 +++-- .../index/similarity/SimilarityTests.java | 3 +- .../index/mapper/MetadataMapperTestCase.java | 9 +- 23 files changed, 276 insertions(+), 119 deletions(-) create mode 100644 docs/changelog/106077.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java diff --git a/docs/changelog/106077.yaml b/docs/changelog/106077.yaml new file mode 100644 index 0000000000000..eb987cd9617f8 --- /dev/null +++ b/docs/changelog/106077.yaml @@ -0,0 +1,7 @@ +pr: 106077 +summary: Fix merging component templates with a mix of dotted and nested object mapper + definitions +area: Mapping +type: bug +issues: + - 105482 diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java index b9ca544e7532d..9f559c8f55858 100644 --- 
a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java @@ -49,7 +49,11 @@ public void testBasics() throws Exception { .endObject() ); - Mapping parsedMapping = createMapperService(mapping).parseMapping("type", new CompressedXContent(mapping)); + Mapping parsedMapping = createMapperService(mapping).parseMapping( + "type", + MapperService.MergeReason.MAPPING_UPDATE, + new CompressedXContent(mapping) + ); assertEquals(mapping, parsedMapping.toCompressedXContent().toString()); assertNotNull(parsedMapping.getMetadataMapperByClass(RankFeatureMetaFieldMapper.class)); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 46b9e365fd0ea..4adc7f9b5ba27 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.TestDocumentParserContext; @@ -206,7 +207,7 @@ public void init() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(mapper), MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { @@ -223,7 +224,7 @@ private void addQueryFieldMappings() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -699,7 +700,7 @@ public void testAllowNoAdditionalSettings() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, () -> indexServiceWithoutSettings.mapperService() - .merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + .merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } @@ -722,7 +723,7 @@ public void testMultiplePercolatorFields() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -763,7 +764,7 @@ public void testNestedPercolatorField() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), 
MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -912,7 +913,7 @@ public void testEmptyName() throws Exception { ); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("type1", new CompressedXContent(mapping)) + () -> mapperService.parseMapping("type1", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("field name cannot be an empty string")); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 7e2c0849a6fad..3ca206eaddb28 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -133,6 +133,7 @@ private static ClusterState applyRequest( final CompressedXContent mappingUpdateSource = request.source(); final Metadata metadata = currentState.metadata(); final List updateList = new ArrayList<>(); + MergeReason reason = request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE; for (Index index : request.indices()) { MapperService mapperService = indexMapperServices.get(index); // IMPORTANT: always get the metadata from the state since it get's batched @@ -147,13 +148,8 @@ private static ClusterState applyRequest( updateList.add(indexMetadata); // try and parse it (no need to add it here) so we can bail early in case of parsing exception // first, simulate: just call merge and ignore the result - Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource); - MapperService.mergeMappings( - mapperService.documentMapper(), - mapping, - request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE, - mapperService.getIndexSettings() - ); + Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, reason, mappingUpdateSource); + MapperService.mergeMappings(mapperService.documentMapper(), mapping, reason, mapperService.getIndexSettings()); } Metadata.Builder builder = Metadata.builder(metadata); boolean updated = false; @@ -169,11 +165,7 @@ private static ClusterState applyRequest( if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = mapperService.merge( - MapperService.SINGLE_MAPPING_NAME, - mappingUpdateSource, - request.autoUpdate() ? 
MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE - ); + DocumentMapper mergedMapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource, reason); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 92aa8662eaf9d..a42477bed2146 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.FlatteningXContentParser; import org.elasticsearch.xcontent.XContentParser; @@ -618,7 +619,14 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() { if (objectMapper instanceof PassThroughObjectMapper passThroughObjectMapper) { containsDimensions = passThroughObjectMapper.containsDimensions(); } - return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic(), false, containsDimensions, dynamic); + return new MapperBuilderContext( + p, + mappingLookup().isSourceSynthetic(), + false, + containsDimensions, + dynamic, + MergeReason.MAPPING_UPDATE + ); } public abstract XContentParser parser(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index bbfb9298c23ca..15caa7f5a6238 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import java.util.Objects; @@ -22,7 +23,11 @@ public class MapperBuilderContext { * The root context, to be used when building a tree of mappers */ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream) { - return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC); + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE); + } + + public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason) { + return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason); } private final String path; @@ -30,9 +35,10 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat private final boolean isDataStream; private final boolean parentObjectContainsDimensions; private final ObjectMapper.Dynamic dynamic; + private final MergeReason mergeReason; MapperBuilderContext(String path) { - this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC); + this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC, MergeReason.MAPPING_UPDATE); } MapperBuilderContext( @@ -40,7 +46,8 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat boolean isSourceSynthetic, boolean isDataStream, boolean parentObjectContainsDimensions, - 
ObjectMapper.Dynamic dynamic + ObjectMapper.Dynamic dynamic, + MergeReason mergeReason ) { Objects.requireNonNull(dynamic, "dynamic must not be null"); this.path = path; @@ -48,6 +55,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat this.isDataStream = isDataStream; this.parentObjectContainsDimensions = parentObjectContainsDimensions; this.dynamic = dynamic; + this.mergeReason = mergeReason; } /** @@ -79,7 +87,8 @@ public MapperBuilderContext createChildContext( this.isSourceSynthetic, this.isDataStream, parentObjectContainsDimensions, - getDynamic(dynamic) + getDynamic(dynamic), + this.mergeReason ); } @@ -121,4 +130,12 @@ public boolean parentObjectContainsDimensions() { public ObjectMapper.Dynamic getDynamic() { return dynamic; } + + /** + * The merge reason to use when merging mappers while building the mapper. + * See also {@link ObjectMapper.Builder#buildMappers(MapperBuilderContext)}. + */ + public MergeReason getMergeReason() { + return mergeReason; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java index 8f8854ad47c7d..1e3f69baf86dd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.index.mapper.MapperService.MergeReason; + /** * Holds context used when merging mappings. * As the merge process also involves building merged {@link Mapper.Builder}s, @@ -23,11 +25,18 @@ private MapperMergeContext(MapperBuilderContext mapperBuilderContext, NewFieldsB this.newFieldsBudget = newFieldsBudget; } + static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE, newFieldsBudget); + } + /** * The root context, to be used when merging a tree of mappers */ - public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { - return new MapperMergeContext(MapperBuilderContext.root(isSourceSynthetic, isDataStream), NewFieldsBudget.of(newFieldsBudget)); + public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason, long newFieldsBudget) { + return new MapperMergeContext( + MapperBuilderContext.root(isSourceSynthetic, isDataStream, mergeReason), + NewFieldsBudget.of(newFieldsBudget) + ); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 4646936b8891f..f91c4f176c6da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -310,7 +310,7 @@ public void updateMapping(final IndexMetadata currentIndexMetadata, final IndexM if (newMappingMetadata != null) { String type = newMappingMetadata.type(); CompressedXContent incomingMappingSource = newMappingMetadata.source(); - Mapping incomingMapping = parseMapping(type, incomingMappingSource); + Mapping incomingMapping = parseMapping(type, MergeReason.MAPPING_UPDATE, incomingMappingSource); DocumentMapper previousMapper; synchronized (this) { previousMapper = this.mapper; @@ -366,7 +366,7 @@ boolean assertNoUpdateRequired(final IndexMetadata newIndexMetadata) { // that the 
incoming mappings are the same as the current ones: we need to // parse the incoming mappings into a DocumentMapper and check that its // serialization is the same as the existing mapper - Mapping newMapping = parseMapping(mapping.type(), mapping.source()); + Mapping newMapping = parseMapping(mapping.type(), MergeReason.MAPPING_UPDATE, mapping.source()); final CompressedXContent currentSource = this.mapper.mappingSource(); final CompressedXContent newSource = newMapping.toCompressedXContent(); if (Objects.equals(currentSource, newSource) == false @@ -533,7 +533,7 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge } private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map mappingSourceAsMap) { - Mapping incomingMapping = parseMapping(type, mappingSourceAsMap); + Mapping incomingMapping = parseMapping(type, reason, mappingSourceAsMap); Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason, this.indexSettings); // TODO: In many cases the source here is equal to mappingSource so we need not serialize again. // We should identify these cases reliably and save expensive serialization here @@ -542,7 +542,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map return newMapper; } this.mapper = newMapper; - assert assertSerialization(newMapper); + assert assertSerialization(newMapper, reason); return newMapper; } @@ -552,9 +552,9 @@ private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, Co return newMapper; } - public Mapping parseMapping(String mappingType, CompressedXContent mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, CompressedXContent mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -564,12 +564,13 @@ public Mapping parseMapping(String mappingType, CompressedXContent mappingSource * A method to parse mapping from a source in a map form. * * @param mappingType the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping */ - public Mapping parseMapping(String mappingType, Map mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, Map mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -619,10 +620,10 @@ static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMappi return newMapping; } - private boolean assertSerialization(DocumentMapper mapper) { + private boolean assertSerialization(DocumentMapper mapper, MergeReason reason) { // capture the source now, it may change due to concurrent parsing final CompressedXContent mappingSource = mapper.mappingSource(); - Mapping newMapping = parseMapping(mapper.type(), mappingSource); + Mapping newMapping = parseMapping(mapper.type(), reason, mappingSource); if (newMapping.toCompressedXContent().equals(mappingSource) == false) { throw new AssertionError( "Mapping serialization result is different from source. 
\n--> Source [" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index 903e4e5da5b29..b5de3971fa091 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -137,8 +137,8 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { * @return the resulting merged mapping. */ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, newFieldsBudget); - RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, mergeContext); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, reason, newFieldsBudget); + RootObjectMapper mergedRoot = root.merge(mergeWith.root, mergeContext); // When merging metadata fields as part of applying an index template, new field definitions // completely overwrite existing ones instead of being merged. This behavior matches how we @@ -176,11 +176,11 @@ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { * @param fieldsBudget the maximum number of fields this mapping may have */ public Mapping withFieldsBudget(long fieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, fieldsBudget); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, MergeReason.MAPPING_RECOVERY, fieldsBudget); // get a copy of the root mapper, without any fields RootObjectMapper shallowRoot = root.withoutMappers(); // calling merge on the shallow root to ensure we're only adding as many fields as allowed by the fields budget - return new Mapping(shallowRoot.merge(root, MergeReason.MAPPING_RECOVERY, mergeContext), metadataMappers, meta); + return new Mapping(shallowRoot.merge(root, mergeContext), metadataMappers, meta); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 8b30915ca4d3c..86d8c1686858c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentType; import java.util.Collections; @@ -79,20 +80,25 @@ static Map convertToMap(CompressedXContent source) { } Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { + return parse(type, MergeReason.MAPPING_UPDATE, source); + } + + Mapping parse(@Nullable String type, MergeReason reason, CompressedXContent source) throws MapperParsingException { Map mapping = convertToMap(source); - return parse(type, mapping); + return parse(type, reason, mapping); } /** * A method to parse mapping from a source in a map form. 
* * @param type the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping * @throws MapperParsingException in case of parsing error */ @SuppressWarnings("unchecked") - Mapping parse(@Nullable String type, Map mappingSource) throws MapperParsingException { + Mapping parse(@Nullable String type, MergeReason reason, Map mappingSource) throws MapperParsingException { if (mappingSource.isEmpty()) { if (type == null) { throw new MapperParsingException("malformed mapping, no type name found"); @@ -178,7 +184,7 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M } return new Mapping( - rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream)), + rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream, reason)), metadataMappers.values().toArray(new MetadataFieldMapper[0]), meta ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index f07d69d86f36c..5c2880a4bf760 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -65,7 +65,8 @@ public NestedObjectMapper build(MapperBuilderContext context) { NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( context.buildFullName(name()), parentIncludedInRoot, - context.getDynamic(dynamic) + context.getDynamic(dynamic), + context.getMergeReason() ); final String fullPath = context.buildFullName(name()); final String nestedTypePath; @@ -121,14 +122,14 @@ private static class NestedMapperBuilderContext extends MapperBuilderContext { final boolean parentIncludedInRoot; - NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic) { - super(path, false, false, false, dynamic); + NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic, MapperService.MergeReason mergeReason) { + super(path, false, false, false, dynamic, mergeReason); this.parentIncludedInRoot = parentIncludedInRoot; } @Override public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { - return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic)); + return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic), getMergeReason()); } } @@ -226,16 +227,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if ((mergeWith instanceof NestedObjectMapper) == false) { MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; - return merge(mergeWithObject, reason, parentMergeContext); - } - ObjectMapper merge(NestedObjectMapper mergeWithObject, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + final MapperService.MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); + var mergeResult = 
MergeResult.build(this, mergeWithObject, parentMergeContext); Explicit incInParent = this.includeInParent; Explicit incInRoot = this.includeInRoot; if (reason == MapperService.MergeReason.INDEX_TEMPLATE) { @@ -287,7 +286,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo new NestedMapperBuilderContext( mapperBuilderContext.buildFullName(name), parentIncludedInRoot, - mapperBuilderContext.getDynamic(dynamic) + mapperBuilderContext.getDynamic(dynamic), + mapperBuilderContext.getMergeReason() ) ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 33e736ff122a1..ba396e9a72d30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -454,11 +454,6 @@ public final boolean subobjects() { return subobjects.value(); } - @Override - public ObjectMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { - return merge(mergeWith, MergeReason.MAPPING_UPDATE, mapperMergeContext); - } - @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -470,7 +465,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo return mapperMergeContext.createChildContext(name, dynamic); } - public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + @Override + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof ObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } @@ -478,11 +474,7 @@ public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeConte // TODO stop NestedObjectMapper extending ObjectMapper? 
MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } - return merge((ObjectMapper) mergeWith, reason, parentMergeContext); - } - - ObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWith, reason, parentMergeContext); + var mergeResult = MergeResult.build(this, (ObjectMapper) mergeWith, parentMergeContext); return new ObjectMapper( simpleName(), fullPath, @@ -499,13 +491,9 @@ protected record MergeResult( ObjectMapper.Dynamic dynamic, Map mappers ) { - static MergeResult build( - ObjectMapper existing, - ObjectMapper mergeWithObject, - MergeReason reason, - MapperMergeContext parentMergeContext - ) { + static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, MapperMergeContext parentMergeContext) { final Explicit enabled; + final MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); if (mergeWithObject.enabled.explicit()) { if (reason == MergeReason.INDEX_TEMPLATE) { enabled = mergeWithObject.enabled; @@ -532,13 +520,7 @@ static MergeResult build( subObjects = existing.subobjects; } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); - Map mergedMappers = buildMergedMappers( - existing, - mergeWithObject, - reason, - objectMergeContext, - subObjects.value() - ); + Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, @@ -550,7 +532,6 @@ static MergeResult build( private static Map buildMergedMappers( ObjectMapper existing, ObjectMapper mergeWithObject, - MergeReason reason, MapperMergeContext objectMergeContext, boolean subobjects ) { @@ -576,11 +557,11 @@ private static Map buildMergedMappers( } else if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.getTotalFieldsCount())) { putMergedMapper(mergedMappers, mergeWithMapper); } else if (mergeWithMapper instanceof ObjectMapper om) { - putMergedMapper(mergedMappers, truncateObjectMapper(reason, objectMergeContext, om)); + putMergedMapper(mergedMappers, truncateObjectMapper(objectMergeContext, om)); } } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { assert subobjects : "existing object mappers are supposed to be flattened if subobjects is false"; - putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, reason, objectMergeContext)); + putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, objectMergeContext)); } else { assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; if (mergeWithMapper instanceof NestedObjectMapper) { @@ -591,7 +572,7 @@ private static Map buildMergedMappers( // If we're merging template mappings when creating an index, then a field definition always // replaces an existing one. 
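The net effect of these hunks is that the merge reason now travels inside the merge context rather than being passed as an explicit argument to every merge(...) overload. A minimal caller-side sketch (where `existing` and `incoming` are hypothetical ObjectMapper instances; the `root(...)`, `merge(...)` and `getMergeReason()` signatures are the ones introduced by this patch):

    // The reason is fixed once, when the root merge context is created.
    MapperMergeContext context = MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE);
    // merge(...) no longer takes a MergeReason parameter...
    ObjectMapper merged = existing.merge(incoming, context);
    // ...because implementations read it back from the builder context:
    assert context.getMapperBuilderContext().getMergeReason() == MapperService.MergeReason.INDEX_TEMPLATE;
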
- if (reason == MergeReason.INDEX_TEMPLATE) { + if (objectMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { putMergedMapper(mergedMappers, mergeWithMapper); } else { putMergedMapper(mergedMappers, mergeIntoMapper.merge(mergeWithMapper, objectMergeContext)); @@ -607,13 +588,13 @@ private static void putMergedMapper(Map mergedMappers, @Nullable } } - private static ObjectMapper truncateObjectMapper(MergeReason reason, MapperMergeContext context, ObjectMapper objectMapper) { + private static ObjectMapper truncateObjectMapper(MapperMergeContext context, ObjectMapper objectMapper) { // there's not enough capacity for the whole object mapper, // so we're just trying to add the shallow object, without it's sub-fields ObjectMapper shallowObjectMapper = objectMapper.withoutMappers(); if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.getTotalFieldsCount())) { // now trying to add the sub-fields one by one via a merge, until we hit the limit - return shallowObjectMapper.merge(objectMapper, reason, context); + return shallowObjectMapper.merge(objectMapper, context); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 16b4d0b49917f..d44f03d72e211 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -100,9 +99,14 @@ public PassThroughObjectMapper.Builder newBuilder(IndexVersion indexVersionCreat return builder; } - public PassThroughObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentBuilderContext) { - final var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + @Override + public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) { + if (mergeWith instanceof PassThroughObjectMapper == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); + } + PassThroughObjectMapper mergeWithObject = (PassThroughObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentBuilderContext); final Explicit containsDimensions = (mergeWithObject.timeSeriesDimensionSubFields.explicit()) ? 
mergeWithObject.timeSeriesDimensionSubFields diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 90d9c879c57e1..8db3a970e31c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -345,15 +345,13 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo } @Override - public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof RootObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } - return merge((RootObjectMapper) mergeWith, reason, parentMergeContext); - } - RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, MapperMergeContext parentMergeContext) { - final var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentMergeContext); final Explicit numericDetection; if (mergeWithObject.numericDetection.explicit()) { numericDetection = mergeWithObject.numericDetection; @@ -377,7 +375,7 @@ RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, Map final Explicit dynamicTemplates; if (mergeWithObject.dynamicTemplates.explicit()) { - if (reason == MergeReason.INDEX_TEMPLATE) { + if (parentMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { Map templatesByKey = new LinkedHashMap<>(); for (DynamicTemplate template : this.dynamicTemplates.value()) { templatesByKey.put(template.name(), template); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index 144bfa3e8701e..486b33d9b155a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -464,7 +464,11 @@ public void testDeeplyNestedMapping() throws Exception { threads[threadId] = new Thread(() -> { try { latch.await(); - mapperService.parseMapping("_doc", new CompressedXContent(Strings.toString(builders[threadId]))); + mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(Strings.toString(builders[threadId])) + ); } catch (Exception e) { throw new AssertionError(e); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java new file mode 100644 index 0000000000000..8c9197b0f3173 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.test.ESTestCase; + +public class MapperBuilderContextTests extends ESTestCase { + + public void testRoot() { + MapperBuilderContext root = MapperBuilderContext.root(false, false); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(MapperService.MergeReason.MAPPING_UPDATE, root.getMergeReason()); + } + + public void testRootWithMergeReason() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperBuilderContext root = MapperBuilderContext.root(false, false, mergeReason); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(mergeReason, root.getMergeReason()); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java index 9c38487dbdf7b..77d3259ea1091 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java @@ -29,4 +29,10 @@ public void testAddFieldIfPossibleUnlimited() { assertTrue(context.decrementFieldBudgetIfPossible(Integer.MAX_VALUE)); } + public void testMergeReasons() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperMergeContext context = MapperMergeContext.root(false, false, mergeReason, Integer.MAX_VALUE); + assertEquals(mergeReason, context.getMapperBuilderContext().getMergeReason()); + } + } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 7f762bbfa7234..0a49907b25567 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -1707,6 +1707,93 @@ public void testExpandDottedNotationToObjectMappers() throws IOException { }"""); } + public void testMergeDottedAndNestedNotation() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping1, mapping2), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping2, mapping1), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "keyword" + } + } + } + } + } + }"""); + } + + public void testDottedAndNestedNotationInSameMapping() throws IOException { + CompressedXContent mapping = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + }, + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + } + private void assertMergeEquals(List mappingSources, String expected) throws IOException { final MapperService mapperServiceBulk = createMapperService(mapping(b -> {})); // simulates multiple component 
templates being merged in a composable index template diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 61d62c1e41969..25e4ccdf4d3a9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -1515,8 +1515,7 @@ public void testMergeNested() { NestedObjectMapper result = (NestedObjectMapper) firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isIncludeInParent()); assertTrue(result.isIncludeInRoot()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 3c4aca4d36284..94a4c2ea92fbb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -75,10 +75,7 @@ public void testMergeDisabledField() { new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); assertFalse(((ObjectMapper) merged.getMapper("disabled")).isEnabled()); } @@ -93,8 +90,7 @@ public void testMergeEnabled() { ObjectMapper result = rootObjectMapper.merge( mergeWith, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertTrue(result.isEnabled()); } @@ -115,8 +111,7 @@ public void testMergeEnabledForRootMapper() { ObjectMapper result = firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isEnabled()); } @@ -131,10 +126,7 @@ public void testMergeDisabledRootMapper() { Collections.singletonMap("test", new TestRuntimeField("test", "long")) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); assertFalse(merged.isEnabled()); assertEquals(1, merged.runtimeFields().size()); assertEquals("test", merged.runtimeFields().iterator().next().name()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 74b293ca7d6d6..154132c772927 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -126,6 +126,7 @@ public void testMerge() throws IOException { assertNull(mapper.mapping().getRoot().dynamic()); Mapping 
mergeWith = mapperService.parseMapping( "_doc", + MergeReason.MAPPING_UPDATE, new CompressedXContent(BytesReference.bytes(topMapping(b -> b.field("dynamic", "strict")))) ); Mapping merged = mapper.mapping().merge(mergeWith, reason, Long.MAX_VALUE); @@ -463,10 +464,14 @@ public void testSubobjectsCannotBeUpdated() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "object"))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { - b.field("type", "object"); - b.field("subobjects", "false"); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { + b.field("type", "object"); + b.field("subobjects", "false"); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) @@ -478,9 +483,13 @@ public void testSubobjectsCannotBeUpdatedOnRoot() throws IOException { MapperService mapperService = createMapperService(topMapping(b -> b.field("subobjects", false))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(topMapping(b -> { - b.field("subobjects", true); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(topMapping(b -> { + b.field("subobjects", true); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index a52fd7e608d24..9b686417badfc 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -254,7 +255,7 @@ public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { IndexService indexService = createIndex("foo"); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> indexService.mapperService().parseMapping("type", new CompressedXContent(mapping)) + () -> indexService.mapperService().parseMapping("type", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), equalTo("Failed to parse mapping: Unknown Similarity type [unknown_similarity] for field [field1]")); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 77391aadaa554..1b00ba3e9fd09 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -120,7 +121,7 @@ public final void testUnsupportedParametersAreRejected() throws IOException { + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) ); assertEquals( "Failed to parse mapping: unknown parameter [anything] on metadata field [" + fieldName() + "]", @@ -136,7 +137,7 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { String mappingAsString = "{\n" + " \"_doc\" : {\n" + " \"" + fieldName() + "\" : {\n" + " }\n" + " }\n" + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) ); assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } @@ -161,7 +162,7 @@ public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); } } @@ -184,7 +185,7 @@ public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); } } From 073048abf2e67161457c33a7403149db025bc14d Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Mon, 8 Apr 2024 09:28:34 -0700 Subject: [PATCH 179/264] Track ongoing search tasks (#107129) --- docs/changelog/107129.yaml | 5 +++++ .../elasticsearch/threadpool/ThreadPool.java | 18 ++++++++++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107129.yaml diff --git a/docs/changelog/107129.yaml b/docs/changelog/107129.yaml new file mode 100644 index 0000000000000..6c9b9094962c1 --- /dev/null +++ b/docs/changelog/107129.yaml @@ -0,0 +1,5 @@ +pr: 107129 +summary: Track ongoing search tasks +area: Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index cf554fe81d4a3..507eff05780b8 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -151,6 +151,8 @@ public static ThreadPoolType fromType(String type) { entry(Names.SYSTEM_CRITICAL_WRITE, ThreadPoolType.FIXED) 
); + public static final double searchAutoscalingEWMA = 0.1; + private final Map executors; private final ThreadPoolInfo threadPoolInfo; @@ -222,7 +224,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16, TaskTrackingConfig.DO_NOT_TRACK)); builders.put( Names.SEARCH, - new FixedExecutorBuilder(settings, Names.SEARCH, searchOrGetThreadPoolSize, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH, + searchOrGetThreadPoolSize, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.SEARCH_WORKER, @@ -230,7 +238,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex ); builders.put( Names.SEARCH_COORDINATION, - new FixedExecutorBuilder(settings, Names.SEARCH_COORDINATION, halfProc, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH_COORDINATION, + halfProc, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.AUTO_COMPLETE, From 91bdfb84a0b04be531b113bd58119ea4804f6e18 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 8 Apr 2024 13:41:53 -0600 Subject: [PATCH 180/264] Clarify data stream recommendations and best practices (#107233) * Clarify data stream recommendations and best practices Our documentation around data streams versus aliases could be interpreted in a way where someone doing *any* updates thinks they need to use an alias with indices instead of a data stream. This commit enhances the documentation around these areas to determine the correct abstraction in a more concrete way. It also tries to clarify that data streams still allow updates to the backing indices, and that a difference is last-write-wins versus first-write-wins. --- .../data-streams/data-streams.asciidoc | 40 ++++++++++++++----- docs/reference/ilm/ilm-tutorial.asciidoc | 17 ++++---- .../ilm/set-up-lifecycle-policy.asciidoc | 7 +++- 3 files changed, 45 insertions(+), 19 deletions(-) diff --git a/docs/reference/data-streams/data-streams.asciidoc b/docs/reference/data-streams/data-streams.asciidoc index 307930d64c4fb..9c7137563caef 100644 --- a/docs/reference/data-streams/data-streams.asciidoc +++ b/docs/reference/data-streams/data-streams.asciidoc @@ -18,6 +18,28 @@ automate the management of these backing indices. For example, you can use hardware and delete unneeded indices. {ilm-init} can help you reduce costs and overhead as your data grows. + +[discrete] +[[should-you-use-a-data-stream]] +== Should you use a data stream? + +To determine whether you should use a data stream for your data, you should consider the format of +the data, and your expected interaction. A good candidate for using a data stream will match the +following criteria: + +* Your data contains a timestamp field, or one could be automatically generated. +* You mostly perform indexing requests, with occasional updates and deletes. +* You index documents without an `_id`, or when indexing documents with an explicit `_id` you expect first-write-wins behavior. + +For most time series data use-cases, a data stream will be a good fit. However, if you find that +your data doesn't fit into these categories (for example, if you frequently send multiple documents +using the same `_id` expecting last-write-wins), you may want to use an index alias with a write +index instead. See documentation for <> for more information. 
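To make the first-write-wins point above concrete, here is a hedged sketch using the server's transport request classes (the data stream name and documents are invented for illustration):

    import org.elasticsearch.action.DocWriteRequest;
    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.xcontent.XContentType;

    // Writes into a data stream must use op_type=create.
    IndexRequest first = new IndexRequest("logs-app-default").id("1")
        .opType(DocWriteRequest.OpType.CREATE)
        .source("{\"@timestamp\": \"2024-04-08T00:00:00Z\", \"message\": \"first\"}", XContentType.JSON);
    // A second create with the same _id is rejected with a version conflict rather
    // than overwriting the first document: first-write-wins, unlike an alias-backed index.
    IndexRequest second = new IndexRequest("logs-app-default").id("1")
        .opType(DocWriteRequest.OpType.CREATE)
        .source("{\"@timestamp\": \"2024-04-08T00:00:01Z\", \"message\": \"second\"}", XContentType.JSON);
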
+ +Keep in mind that some features such as <> and +<> require a data stream. + [discrete] [[backing-indices]] == Backing indices @@ -116,19 +138,19 @@ You should not derive any intelligence from the backing indices names. [discrete] [[data-streams-append-only]] -== Append-only +== Append-only (mostly) -Data streams are designed for use cases where existing data is rarely, -if ever, updated. You cannot send update or deletion requests for existing -documents directly to a data stream. Instead, use the +Data streams are designed for use cases where existing data is rarely updated. You cannot send +update or deletion requests for existing documents directly to a data stream. However, you can still +<> in a data stream by submitting +requests directly to the document's backing index. + +If you need to update a larger number of documents in a data stream, you can use the <> and <> APIs. -If needed, you can <> by submitting requests directly to the document's backing index. - -TIP: If you frequently update or delete existing time series data, use an index -alias with a write index instead of a data stream. See +TIP: If you frequently send multiple documents using the same `_id` expecting last-write-wins, you +may want to use an index alias with a write index instead. See <>. include::set-up-a-data-stream.asciidoc[] diff --git a/docs/reference/ilm/ilm-tutorial.asciidoc b/docs/reference/ilm/ilm-tutorial.asciidoc index c7f2c3537b5e8..4513c523056a9 100644 --- a/docs/reference/ilm/ilm-tutorial.asciidoc +++ b/docs/reference/ilm/ilm-tutorial.asciidoc @@ -282,14 +282,15 @@ DELETE /_index_template/timeseries_template [[manage-time-series-data-without-data-streams]] === Manage time series data without data streams -Even though <> are a convenient way to scale -and manage time series data, they are designed to be append-only. We recognise there -might be use-cases where data needs to be updated or deleted in place and the -data streams don't support delete and update requests directly, -so the index APIs would need to be used directly on the data stream's backing indices. - -In these cases, you can use an index alias to manage indices containing the time series data -and periodically roll over to a new index. +Even though <> are a convenient way to scale and manage time series +data, they are designed to be append-only. We recognise there might be use-cases where data needs to +be updated or deleted in place and the data streams don't support delete and update requests +directly, so the index APIs would need to be used directly on the data stream's backing indices. In +these cases we still recommend using a data stream. + +If you frequently send multiple documents using the same `_id` expecting last-write-wins, you can +use an index alias instead of a data stream to manage indices containing the time series data and +periodically roll over to a new index. To automate rollover and management of time series indices with {ilm-init} using an index alias, you: diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 79be6205a8c88..b6310050a4f25 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -81,6 +81,8 @@ To use a policy that triggers the rollover action, you need to configure the policy in the index template used to create each new index. You specify the name of the policy and the alias used to reference the rolling indices. 
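The first index behind such a rollover alias has to be created manually and designated as the write index, as the TIP and docs below spell out. A sketch of that bootstrap step (index and alias names are invented; `CreateIndexRequest` and `Alias` are the server's transport classes):

    import org.elasticsearch.action.admin.indices.alias.Alias;
    import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;

    // Manually create the first rolling index and mark it as the write index,
    // so the rollover alias routes new documents to it until the next rollover.
    CreateIndexRequest bootstrap = new CreateIndexRequest("timeseries-000001")
        .alias(new Alias("timeseries").writeIndex(true));
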
+TIP: An `index.lifecycle.rollover_alias` setting is only required if using {ilm} with an alias. It is unnecessary when using <>. + You can use the {kib} Create template wizard to create a template. To access the wizard, open the menu and go to *Stack Management > Index Management*. In the *Index Templates* tab, click *Create template*. @@ -128,8 +130,9 @@ DELETE _index_template/my_template [[create-initial-index]] ==== Create an initial managed index -When you set up policies for your own rolling indices, you need to manually create the first index -managed by a policy and designate it as the write index. +When you set up policies for your own rolling indices, if you are not using the recommended +<>, you need to manually create the first index managed by a policy and +designate it as the write index. IMPORTANT: When you enable {ilm} for {beats} or the {ls} {es} output plugin, the necessary policies and configuration changes are applied automatically. From f1bb5bb5aa0870b939d814650d81967d256c4baf Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 8 Apr 2024 16:56:57 -0400 Subject: [PATCH 181/264] Bump versions after 8.13.2 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index abde05ec7919e..612838592712b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 53243c2c081eb..58dcf875ce297 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.2 + - label: "{{matrix.image}} / 8.13.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.3 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index c5b9bb830a8d6..3462e0fb95aba 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -312,8 +312,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.13.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.2#bwcTest + - label: 8.13.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -321,7 +321,7 @@ steps: machineType: 
n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 2d8ace4845f4f..d3e57196e1c89 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,5 +30,5 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 89449ff7f9f2f..db131b89ffa4e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - "7.17.20" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 992308bd32018..99e811c021845 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -170,6 +170,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version V_8_13_1 = new Version(8_13_01_99); public static final Version V_8_13_2 = new Version(8_13_02_99); + public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 679270e90e894..4bae460e3bce2 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8560001 8.13.0,8595000 8.13.1,8595000 +8.13.2,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index b60066601bf68..61cc2167a9048 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8500010 8.13.0,8503000 8.13.1,8503000 +8.13.2,8503000 From e28ecbb5fb59e1fca0e06436bdc6591df46db5e1 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 8 Apr 2024 16:57:35 -0400 Subject: [PATCH 182/264] Prune changelogs after 8.13.2 release --- docs/changelog/106247.yaml | 5 ----- docs/changelog/106673.yaml | 6 ------ docs/changelog/106873.yaml | 6 ------ docs/changelog/106990.yaml | 5 ----- docs/changelog/107054.yaml | 6 ------ docs/changelog/107059.yaml | 5 ----- 6 files changed, 33 deletions(-) delete mode 100644 docs/changelog/106247.yaml delete mode 100644 docs/changelog/106673.yaml delete mode 100644 docs/changelog/106873.yaml delete mode 100644 docs/changelog/106990.yaml delete mode 100644 docs/changelog/107054.yaml delete mode 100644 docs/changelog/107059.yaml diff --git a/docs/changelog/106247.yaml b/docs/changelog/106247.yaml deleted file mode 100644 index 
5895dffd685a4..0000000000000 --- a/docs/changelog/106247.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106247 -summary: Fix a downsample persistent task assignment bug -area: Downsampling -type: bug -issues: [] diff --git a/docs/changelog/106673.yaml b/docs/changelog/106673.yaml deleted file mode 100644 index 9a716d20ad2bc..0000000000000 --- a/docs/changelog/106673.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106673 -summary: "ESQL: Fix fully pruned aggregates" -area: ES|QL -type: bug -issues: - - 106427 diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml deleted file mode 100644 index f823caff7aefe..0000000000000 --- a/docs/changelog/106873.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106873 -summary: Query API Key Information API support for the `typed_keys` request parameter -area: Security -type: enhancement -issues: - - 106817 diff --git a/docs/changelog/106990.yaml b/docs/changelog/106990.yaml deleted file mode 100644 index 26646e742a5ee..0000000000000 --- a/docs/changelog/106990.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106990 -summary: Address concurrency issue in top hits aggregation -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml deleted file mode 100644 index 6511cb5185492..0000000000000 --- a/docs/changelog/107054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107054 -summary: Query API Keys support for both `aggs` and `aggregations` keywords -area: Security -type: enhancement -issues: - - 106839 diff --git a/docs/changelog/107059.yaml b/docs/changelog/107059.yaml deleted file mode 100644 index 6c7ee48f9b53b..0000000000000 --- a/docs/changelog/107059.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107059 -summary: "[Connector API] Support numeric for configuration select option value type" -area: Application -type: bug -issues: [] From 1c89a8ee280f6df07dc3de90e610bbc248da214d Mon Sep 17 00:00:00 2001 From: sliu1013 Date: Tue, 9 Apr 2024 08:05:35 +0800 Subject: [PATCH 183/264] Fix typo in IndicesAdminClient#prepareOpen Javadoc (#107198) --- .../org/elasticsearch/client/internal/IndicesAdminClient.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index d931302740f19..69b897df4d76d 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -247,7 +247,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Opens one or more indices based on their index name. * - * @param indices The name of the indices to close + * @param indices The name of the indices to open */ OpenIndexRequestBuilder prepareOpen(String... 
indices); From 911aaf8ef92d8ecba7d0389571ef18fb82a99eb7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 9 Apr 2024 02:08:36 -0400 Subject: [PATCH 184/264] Forward port release notes for v8.13.2 (#107243) --- docs/reference/release-notes.asciidoc | 2 ++ docs/reference/release-notes/8.13.2.asciidoc | 31 ++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 docs/reference/release-notes/8.13.2.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index f9da92aef925e..05c97d51a38e7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -64,6 +65,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.2.asciidoc[] include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] diff --git a/docs/reference/release-notes/8.13.2.asciidoc b/docs/reference/release-notes/8.13.2.asciidoc new file mode 100644 index 0000000000000..1da23b5125833 --- /dev/null +++ b/docs/reference/release-notes/8.13.2.asciidoc @@ -0,0 +1,31 @@ +[[release-notes-8.13.2]] +== {es} version 8.13.2 + +Also see <>. + +[[bug-8.13.2]] +[float] +=== Bug fixes + +Aggregations:: +* Address concurrency issue in top hits aggregation {es-pull}106990[#106990] + +Application:: +* [Connector API] Support numeric for configuration select option value type {es-pull}107059[#107059] + +Downsampling:: +* Fix a downsample persistent task assignment bug {es-pull}106247[#106247] +* Fix downsample action request serialization {es-pull}106920[#106920] + +ES|QL:: +* ESQL: Fix fully pruned aggregates {es-pull}106673[#106673] (issue: {es-issue}106427[#106427]) + +[[enhancement-8.13.2]] +[float] +=== Enhancements + +Security:: +* Query API Key Information API support for the `typed_keys` request parameter {es-pull}106873[#106873] (issue: {es-issue}106817[#106817]) +* Query API Keys support for both `aggs` and `aggregations` keywords {es-pull}107054[#107054] (issue: {es-issue}106839[#106839]) + + From 49ffa045a694192884d274194fa14100b963e0c2 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 9 Apr 2024 09:18:58 +0200 Subject: [PATCH 185/264] Cut over stored fields to ZSTD for compression. (#103374) This cuts over stored fields with `index.codec: best_speed` (default) to ZSTD with level 0 and blocks of at most 128 documents or 14kB, and `index.codec: best_compression` to ZSTD with level 3 and blocks of at most 2,048 documents or 240kB. Compared with the current codecs, this would yield similar indexing speed, much better space efficiency and similar retrieval speed. Benchmarks on the `elastic/logs` track suggest 10% better storage efficiency and slightly faster ingestion. The Lucene codec infrastructure records the codec on a per-segment basis and ensures that this change is backward-compatible. Segments will get progressively migrated to ZSTD as they get merged in the background. Bindings for ZSTD are provided by the Panama FFI API on JDK21+ and JNA on older JDKs. ZSTD support is currently behind a feature flag, so it won't be enabled immediately when this feature gets merged; enabling it will require a follow-up change.
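In code terms, the two `index.codec` values map onto the new codec as follows. This is a sketch built from the classes this patch adds (see the CodecService and Elasticsearch814Codec diffs below); the level and block-size figures are the ones quoted above:

    import org.apache.lucene.codecs.Codec;
    import org.elasticsearch.index.codec.Elasticsearch814Codec;
    import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat;

    // index.codec: default -> ZSTD level 0, blocks of at most 128 docs or 14kB
    Codec bestSpeed = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED);
    // index.codec: best_compression -> ZSTD level 3, blocks of at most 2,048 docs or 240kB
    Codec bestCompression = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION);
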
Co-authored-by: Mark Vieira Co-authored-by: Ryan Ernst --- docs/changelog/103374.yaml | 16 ++ server/src/main/java/module-info.java | 3 + .../index/codec/CodecService.java | 30 ++- .../index/codec/Elasticsearch814Codec.java | 130 +++++++++++ .../codec/LegacyPerFieldMapperCodec.java | 52 +++++ .../index/codec/PerFieldFormatSupplier.java | 123 ++++++++++ .../index/codec/PerFieldMapperCodec.java | 94 +------- .../codec/zstd/Zstd814StoredFieldsFormat.java | 212 ++++++++++++++++++ .../services/org.apache.lucene.codecs.Codec | 1 + .../elasticsearch/index/codec/CodecTests.java | 60 +++-- .../index/codec/PerFieldMapperCodecTests.java | 33 +-- ...estCompressionStoredFieldsFormatTests.java | 23 ++ ...td814BestSpeedStoredFieldsFormatTests.java | 23 ++ .../index/mapper/MapperServiceTestCase.java | 4 +- 14 files changed, 678 insertions(+), 126 deletions(-) create mode 100644 docs/changelog/103374.yaml create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java create mode 100644 server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec create mode 100644 server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java diff --git a/docs/changelog/103374.yaml b/docs/changelog/103374.yaml new file mode 100644 index 0000000000000..fcdee9185eb92 --- /dev/null +++ b/docs/changelog/103374.yaml @@ -0,0 +1,16 @@ +pr: 103374 +summary: Cut over stored fields to ZSTD for compression +area: Search +type: enhancement +issues: [] +highlight: + title: Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE + body: |- + Stored fields are now compressed by splitting documents into blocks, which + are then compressed independently with ZStandard. `index.codec: default` + (default) uses blocks of at most 14kB or 128 documents compressed with level + 0, while `index.codec: best_compression` uses blocks of at most 240kB or + 2048 documents compressed at level 3. On most datasets that we tested + against, this yielded storage improvements in the order of 10%, slightly + faster indexing and similar retrieval latencies. + notable: true diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 83b8606da2997..abfea0b18b9d8 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -6,6 +6,7 @@ * Side Public License, v 1. 
*/ +import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -243,6 +244,7 @@ exports org.elasticsearch.index.codec; exports org.elasticsearch.index.codec.tsdb; exports org.elasticsearch.index.codec.bloomfilter; + exports org.elasticsearch.index.codec.zstd; exports org.elasticsearch.index.engine; exports org.elasticsearch.index.fielddata; exports org.elasticsearch.index.fielddata.fieldcomparator; @@ -433,6 +435,7 @@ with org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; + provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; exports org.elasticsearch.cluster.routing.allocation.shards to diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index d4771ba74e0fb..3ebcd1cb5b420 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -11,7 +11,9 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; import java.util.HashMap; @@ -25,22 +27,40 @@ */ public class CodecService { + public static final FeatureFlag ZSTD_STORED_FIELDS_FEATURE_FLAG = new FeatureFlag("zstd_stored_fields"); + private final Map codecs; public static final String DEFAULT_CODEC = "default"; + public static final String LEGACY_DEFAULT_CODEC = "legacy_default"; // escape hatch public static final String BEST_COMPRESSION_CODEC = "best_compression"; + public static final String LEGACY_BEST_COMPRESSION_CODEC = "legacy_best_compression"; // escape hatch + /** the raw unfiltered lucene default. 
useful for testing */ public static final String LUCENE_DEFAULT_CODEC = "lucene_default"; public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene99Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); + + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, legacyBestSpeedCodec); } + codecs.put(LEGACY_DEFAULT_CODEC, legacyBestSpeedCodec); + + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put( + BEST_COMPRESSION_CODEC, + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) + ); + } else { + codecs.put(BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + } + codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { codecs.put(codec, Codec.forName(codec)); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java new file mode 100644 index 0000000000000..e85e05c87b083 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.14. This extends the Lucene 9.9 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. 
+ */ +public class Elasticsearch814Codec extends FilterCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch814Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch814Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch814Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch814Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch814", new Lucene99Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene99PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *

The default implementation always returns "Lucene99". + * + * WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field. + * + * The default implementation always returns "Lucene90". + * + * WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field. + * + * The default implementation always returns "Lucene99". + * + * WARNING: if you subclass, you are responsible for index backwards compatibility: + * future versions of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java new file mode 100644 index 0000000000000..a682d26b094e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; +import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Objects; + +/** + * Class that encapsulates the logic of figuring out the most appropriate file format for a given field, across postings, doc values and + * vectors. + */ +public class PerFieldFormatSupplier { + + private final MapperService mapperService; + private final BigArrays bigArrays; + private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); + private final KnnVectorsFormat knnVectorsFormat = new Lucene99HnswVectorsFormat(); + private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; + private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + + private final ES812PostingsFormat es812PostingsFormat; + + public PerFieldFormatSupplier(MapperService mapperService, BigArrays bigArrays) { + this.mapperService = mapperService; + this.bigArrays = Objects.requireNonNull(bigArrays); + this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); + this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); + this.es812PostingsFormat = new ES812PostingsFormat(); + } + + public PostingsFormat getPostingsFormatForField(String field) { + if (useBloomFilter(field)) { + return bloomFilterPostingsFormat; + } + return internalGetPostingsFormatForField(field); + } + + private PostingsFormat internalGetPostingsFormatForField(String field) { + if (mapperService != null) { + final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); + if (format != null) { + return format; + } + } + // return our own posting format using PFOR + return es812PostingsFormat; + } + + boolean useBloomFilter(String field) { + if (mapperService == null) { + return false; + } + IndexSettings indexSettings = mapperService.getIndexSettings(); + if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { + // In case for time series indices, the _id isn't randomly generated, + // but based on dimension fields and timestamp field, so during indexing + // version/seq_no/term needs to be looked up and having a bloom filter + // can speed this up significantly. 
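+ // The mode check below still matters: a regular data stream also has the timestamp field enabled,
+ // but its documents keep auto-generated ids, so the bloom filter is reserved for true time series indices.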
+ return indexSettings.getMode() == IndexMode.TIME_SERIES + && IdFieldMapper.NAME.equals(field) + && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } else { + return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } + } + + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + if (mapperService != null) { + Mapper mapper = mapperService.mappingLookup().getMapper(field); + if (mapper instanceof DenseVectorFieldMapper vectorMapper) { + return vectorMapper.getKnnVectorsFormatForField(knnVectorsFormat); + } + } + return knnVectorsFormat; + } + + public DocValuesFormat getDocValuesFormatForField(String field) { + if (useTSDBDocValuesFormat(field)) { + return tsdbDocValuesFormat; + } + return docValuesFormat; + } + + boolean useTSDBDocValuesFormat(final String field) { + if (excludeFields(field)) { + return false; + } + + return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); + } + + private boolean excludeFields(String fieldName) { + // Avoid using tsdb codec for fields like _seq_no, _primary_term. + // But _tsid and _ts_routing_hash should always use the tsdb codec. + return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; + } + + private boolean isTimeSeriesModeIndex() { + return mapperService != null && IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index ae497af887d9c..6f88578260db3 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -12,19 +12,10 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; -import org.elasticsearch.index.codec.postings.ES812PostingsFormat; -import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; /** * {@link PerFieldMapperCodec This Lucene codec} provides the default @@ -34,93 +25,32 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
*/ -public final class PerFieldMapperCodec extends Lucene99Codec { +public final class PerFieldMapperCodec extends Elasticsearch814Codec { - private final MapperService mapperService; - private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); - private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; - private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + private final PerFieldFormatSupplier formatSupplier; - private final ES812PostingsFormat es812PostingsFormat; - - static { - assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMapperCodec.class) - : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; - } - - public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); - this.mapperService = mapperService; - this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); - this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); - this.es812PostingsFormat = new ES812PostingsFormat(); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == delegate.getClass() + : "PerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; } @Override public PostingsFormat getPostingsFormatForField(String field) { - if (useBloomFilter(field)) { - return bloomFilterPostingsFormat; - } - return internalGetPostingsFormatForField(field); - } - - private PostingsFormat internalGetPostingsFormatForField(String field) { - final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); - if (format != null) { - return format; - } - // return our own posting format using PFOR - return es812PostingsFormat; - } - - boolean useBloomFilter(String field) { - IndexSettings indexSettings = mapperService.getIndexSettings(); - if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { - // In case for time series indices, they _id isn't randomly generated, - // but based on dimension fields and timestamp field, so during indexing - // version/seq_no/term needs to be looked up and having a bloom filter - // can speed this up significantly. 
- return indexSettings.getMode() == IndexMode.TIME_SERIES - && IdFieldMapper.NAME.equals(field) - && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } else { - return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } + return formatSupplier.getPostingsFormatForField(field); } @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - Mapper mapper = mapperService.mappingLookup().getMapper(field); - if (mapper instanceof DenseVectorFieldMapper vectorMapper) { - return vectorMapper.getKnnVectorsFormatForField(super.getKnnVectorsFormatForField(field)); - } - return super.getKnnVectorsFormatForField(field); + return formatSupplier.getKnnVectorsFormatForField(field); } @Override public DocValuesFormat getDocValuesFormatForField(String field) { - if (useTSDBDocValuesFormat(field)) { - return tsdbDocValuesFormat; - } - return docValuesFormat; - } - - boolean useTSDBDocValuesFormat(final String field) { - if (excludeFields(field)) { - return false; - } - - return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); - } - - private boolean excludeFields(String fieldName) { - // Avoid using tsdb codec for fields like _seq_no, _primary_term. - // But _tsid and _ts_routing_hash should always use the tsdb codec. - return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; - } - - private boolean isTimeSeriesModeIndex() { - return IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + return formatSupplier.getDocValuesFormatForField(field); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java new file mode 100644 index 0000000000000..b827bb6436f07 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.codecs.compressing.CompressionMode; +import org.apache.lucene.codecs.compressing.Compressor; +import org.apache.lucene.codecs.compressing.Decompressor; +import org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingStoredFieldsFormat; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.ByteBuffersDataInput; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; +import org.elasticsearch.nativeaccess.NativeAccess; +import org.elasticsearch.nativeaccess.Zstd; + +import java.io.IOException; + +/** + * {@link org.apache.lucene.codecs.StoredFieldsFormat} that compresses blocks of data using ZStandard. 
+ * + * Unlike Lucene's default stored fields format, this format does not make use of dictionaries (even though ZStandard has great support for + * dictionaries!). This is mostly due to the fact that LZ4/DEFLATE have short sliding windows that they can use to find duplicate strings + * (64kB and 32kB respectively). In contrast, ZSTD doesn't have such a limitation and can better take advantage of large compression + * buffers. + */ +public final class Zstd814StoredFieldsFormat extends Lucene90CompressingStoredFieldsFormat { + + // ZSTD has special optimizations for inputs that are less than 16kB and less than 256kB. So subtract a bit of memory from 16kB and + // 256kB to make our inputs unlikely to grow beyond 16kB for BEST_SPEED and 256kB for BEST_COMPRESSION. + private static final int BEST_SPEED_BLOCK_SIZE = (16 - 2) * 1_024; + private static final int BEST_COMPRESSION_BLOCK_SIZE = (256 - 16) * 1_024; + + /** Attribute key for compression mode. */ + public static final String MODE_KEY = Zstd814StoredFieldsFormat.class.getSimpleName() + ".mode"; + + public enum Mode { + BEST_SPEED(0, BEST_SPEED_BLOCK_SIZE, 128), + BEST_COMPRESSION(3, BEST_COMPRESSION_BLOCK_SIZE, 2048); + + final int level, blockSizeInBytes, blockDocCount; + + Mode(int level, int blockSizeInBytes, int blockDocCount) { + this.level = level; + this.blockSizeInBytes = blockSizeInBytes; + this.blockDocCount = blockDocCount; + } + } + + private final Mode mode; + + public Zstd814StoredFieldsFormat(Mode mode) { + super("ZstdStoredFields814", new ZstdCompressionMode(mode.level), mode.blockSizeInBytes, mode.blockDocCount, 10); + this.mode = mode; + } + + @Override + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + // Both modes are compatible, we only put an attribute for debug purposes. + String previous = si.putAttribute(MODE_KEY, mode.name()); + if (previous != null && previous.equals(mode.name()) == false) { + throw new IllegalStateException( + "found existing value for " + MODE_KEY + " for segment: " + si.name + "old=" + previous + ", new=" + mode.name() + ); + } + return super.fieldsWriter(directory, si, context); + } + + private static class ZstdCompressionMode extends CompressionMode { + private final int level; + + ZstdCompressionMode(int level) { + this.level = level; + } + + @Override + public Compressor newCompressor() { + return new ZstdCompressor(level); + } + + @Override + public Decompressor newDecompressor() { + return new ZstdDecompressor(); + } + + @Override + public String toString() { + return "ZSTD(level=" + level + ")"; + } + } + + private static final class ZstdDecompressor extends Decompressor { + + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. 
+ final byte[] copyBuffer = new byte[4096]; + + ZstdDecompressor() {} + + @Override + public void decompress(DataInput in, int originalLength, int offset, int length, BytesRef bytes) throws IOException { + if (originalLength == 0) { + bytes.offset = 0; + bytes.length = 0; + return; + } + + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int compressedLength = in.readVInt(); + + try ( + CloseableByteBuffer src = nativeAccess.newBuffer(compressedLength); + CloseableByteBuffer dest = nativeAccess.newBuffer(originalLength) + ) { + + while (src.buffer().position() < compressedLength) { + final int numBytes = Math.min(copyBuffer.length, compressedLength - src.buffer().position()); + in.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int decompressedLen = zstd.decompress(dest, src); + if (decompressedLen != originalLength) { + throw new CorruptIndexException("Expected " + originalLength + " decompressed bytes, got " + decompressedLen, in); + } + + bytes.bytes = ArrayUtil.growNoCopy(bytes.bytes, length); + dest.buffer().get(offset, bytes.bytes, 0, length); + bytes.offset = 0; + bytes.length = length; + } + } + + @Override + public Decompressor clone() { + return new ZstdDecompressor(); + } + } + + private static class ZstdCompressor extends Compressor { + + final int level; + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. + final byte[] copyBuffer = new byte[4096]; + + ZstdCompressor(int level) { + this.level = level; + } + + @Override + public void compress(ByteBuffersDataInput buffersInput, DataOutput out) throws IOException { + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int srcLen = Math.toIntExact(buffersInput.length()); + if (srcLen == 0) { + return; + } + + final int compressBound = zstd.compressBound(srcLen); + + // NOTE: We are allocating/deallocating native buffers on each call. We could save allocations by reusing these buffers, though + // this would come at the expense of higher permanent memory usage. Benchmarks suggested that there is some performance to save + // there, but it wouldn't be a game changer either. + // Also note that calls to #compress implicitly allocate memory under the hood for e.g. hash tables and chain tables that help + // identify duplicate strings. So if we wanted to avoid allocating memory on every compress call, we should also look into + // reusing compression contexts, which are not small and would increase permanent memory usage as well. 
+ try ( + CloseableByteBuffer src = nativeAccess.newBuffer(srcLen); + CloseableByteBuffer dest = nativeAccess.newBuffer(compressBound) + ) { + + while (buffersInput.position() < buffersInput.length()) { + final int numBytes = Math.min(copyBuffer.length, (int) (buffersInput.length() - buffersInput.position())); + buffersInput.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int compressedLen = zstd.compress(dest, src, level); + out.writeVInt(compressedLen); + + for (int written = 0; written < compressedLen;) { + final int numBytes = Math.min(copyBuffer.length, compressedLen - written); + dest.buffer().get(copyBuffer, 0, numBytes); + out.writeBytes(copyBuffer, 0, numBytes); + written += numBytes; + assert written == dest.buffer().position(); + } + } + } + + @Override + public void close() throws IOException {} + } +} diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec new file mode 100644 index 0000000000000..b99a15507f742 --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -0,0 +1 @@ +org.elasticsearch.index.codec.Elasticsearch814Codec diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 7a3d48aad13d3..bd4aa0241cd27 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -12,10 +12,11 @@ import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntField; +import org.apache.lucene.document.KeywordField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.TransportVersion; @@ -31,6 +32,7 @@ import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; @@ -43,35 +45,51 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=0), chunkSize=14336, maxDocsPerChunk=128, blockShift=10)", + codec.storedFieldsFormat().toString() + ); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); + assertEquals( + 
"Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=3), chunkSize=245760, maxDocsPerChunk=2048, blockShift=10)", + codec.storedFieldsFormat().toString() + ); + } + + public void testLegacyDefault() throws Exception { + Codec codec = createCodecService().codec("legacy_default"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } - // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - iw.addDocument(new Document()); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); - ir.close(); - dir.close(); + public void testLegacyBestCompression() throws Exception { + Codec codec = createCodecService().codec("legacy_best_compression"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } private CodecService createCodecService() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index 4ce20e35869cb..74657842488b5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -63,7 +62,7 @@ public class PerFieldMapperCodecTests extends ESTestCase { """; public void testUseBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, randomBoolean(), false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); 
assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -71,7 +70,7 @@ public void testUseBloomFilter() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -79,13 +78,13 @@ public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled_noTimeSeriesMode() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); assertWarnings( @@ -94,28 +93,29 @@ public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() thr } public void testUseES87TSDBEncodingForTimestampField() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDoNotUseES87TSDBEncodingForTimestampFieldNonTimeSeriesIndex() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } public void testEnableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDisableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } - private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { 
settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -140,31 +140,32 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeDisabledCodecDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeAndCodecEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); } - private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -173,7 +174,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..1679813ed1340 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..5acdd4f5730e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 09c6eed08bf28..620db8dc83510 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -43,6 +42,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.codec.PerFieldMapperCodec; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -243,7 +243,7 @@ protected static void withLuceneIndex( CheckedConsumer test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, 
mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw); From 658f7aa21c4a4165e9814aada63bc74b2366e403 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 9 Apr 2024 08:38:13 +0100 Subject: [PATCH 186/264] Unwrap exceptions in ES|QL Async Query GET (#107227) This commit updates the implementation of the ES|QL Async Query Get transport action so that exceptions are unwrapped before being exposed. This ensures that the exceptions seen by the client remain the same between sync and async. Specifically, only ParsingException and VerificationException are unwrapped, since these are currently the only ones that are returned. --- .../NotSerializableExceptionWrapper.java | 2 +- .../xpack/esql/action/EsqlActionIT.java | 16 +---- .../xpack/esql/action/EsqlAsyncActionIT.java | 24 -------- .../TransportEsqlAsyncGetResultsAction.java | 58 +++++++++++++++++++ 4 files changed, 62 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java index bd97ec0c2f63f..056fc59b4fdd5 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java @@ -58,7 +58,7 @@ protected void writeTo(StreamOutput out, Writer nestedExceptionsWrite } @Override - protected String getExceptionName() { + public String getExceptionName() { return name; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 17082e9855761..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -994,29 +994,19 @@ public void testOverlappingIndexPatterns() throws Exception { .add(new IndexRequest("test_overlapping_index_patterns_2").id("1").source("field", "foo")) .get(); - assertVerificationException("from test_overlapping_index_patterns_* | sort field"); + assertThrows(VerificationException.class, () -> run("from test_overlapping_index_patterns_* | sort field")); } public void testErrorMessageForUnknownColumn() { - var e = assertVerificationException("row a = 1 | eval x = b"); + var e = expectThrows(VerificationException.class, () -> run("row a = 1 | eval x = b")); assertThat(e.getMessage(), containsString("Unknown column [b]")); } - // Straightforward verification. Subclasses can override. - protected Exception assertVerificationException(String esqlCommand) { - return expectThrows(VerificationException.class, () -> run(esqlCommand)); - } - public void testErrorMessageForEmptyParams() { - var e = assertParsingException("row a = 1 | eval x = ?"); + var e = expectThrows(ParsingException.class, () -> run("row a = 1 | eval x = ?")); assertThat(e.getMessage(), containsString("Not enough actual parameters 0")); } - // Straightforward verification. Subclasses can override. 
- protected Exception assertParsingException(String esqlCommand) { - return expectThrows(ParsingException.class, () -> run(esqlCommand)); - } - public void testEmptyIndex() { assertAcked(client().admin().indices().prepareCreate("test_empty").setMapping("k", "type=keyword", "v", "type=long").get()); try (EsqlQueryResponse results = run("from test_empty")) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index e884b67fb5d24..e2e635917ed1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -25,8 +24,6 @@ import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; -import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.nio.file.Path; @@ -37,7 +34,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; @@ -122,26 +118,6 @@ AcknowledgedResponse deleteAsyncId(String id) { } } - // Overridden to allow for not-serializable wrapper. - @Override - protected Exception assertVerificationException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, VerificationException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("verification_exception")); - } - return e; - } - - // Overridden to allow for not-serializable wrapper. 
- @Override - protected Exception assertParsingException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, ParsingException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("parsing_exception")); - } - return e; - } - public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin { public LocalStateEsqlAsync(final Settings settings, final Path configPath) { super(settings, configPath); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java index 8785b8f5de887..afb7ee6f53029 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java @@ -7,20 +7,29 @@ package org.elasticsearch.xpack.esql.plugin; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchWrapperException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.action.EsqlQueryTask; +import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.ql.plugin.AbstractTransportQlAsyncGetResultsAction; +import org.elasticsearch.xpack.ql.tree.Source; public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsyncGetResultsAction { @@ -51,8 +60,57 @@ public TransportEsqlAsyncGetResultsAction( this.blockFactory = blockFactory; } + @Override + protected void doExecute(Task task, GetAsyncResultRequest request, ActionListener listener) { + super.doExecute(task, request, unwrapListener(listener)); + } + @Override public Writeable.Reader responseReader() { return EsqlQueryResponse.reader(blockFactory); } + + static final String PARSE_EX_NAME = ElasticsearchException.getExceptionName(new ParsingException(Source.EMPTY, "")); + static final String VERIFY_EX_NAME = ElasticsearchException.getExceptionName(new VerificationException("")); + + /** + * Unwraps the exception in the case of failure. This keeps the exception types + * the same as the sync API, namely ParsingException and VerificationException.
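+ * Any other failure is handed to the listener unchanged.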
+ */ + static ActionListener unwrapListener(ActionListener listener) { + return new ActionListener<>() { + @Override + public void onResponse(R o) { + listener.onResponse(o); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ElasticsearchWrapperException && e instanceof ElasticsearchException ee) { + e = unwrapEsException(ee); + } + if (e instanceof NotSerializableExceptionWrapper wrapper) { + String name = wrapper.getExceptionName(); + if (PARSE_EX_NAME.equals(name)) { + e = new ParsingException(Source.EMPTY, e.getMessage()); + e.setStackTrace(wrapper.getStackTrace()); + e.addSuppressed(wrapper); + } else if (VERIFY_EX_NAME.contains(name)) { + e = new VerificationException(e.getMessage()); + e.setStackTrace(wrapper.getStackTrace()); + e.addSuppressed(wrapper); + } + } + listener.onFailure(e); + } + }; + } + + static RuntimeException unwrapEsException(ElasticsearchException esEx) { + Throwable root = esEx.unwrapCause(); + if (root instanceof RuntimeException runtimeException) { + return runtimeException; + } + return esEx; + } } From 36049730fde3bd672c1904497bd44f237ed8ca80 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Apr 2024 09:03:36 +0100 Subject: [PATCH 187/264] Remove executor lookups from `TransportBulkAction` (#106941) Replaces the `String` names with proper `Executor` instances. Relates #106279 (removes another usage of `SAME`) Relates #106940, #106938, #105460, #99787, #97879 etc. --- .../action/bulk/BulkOperation.java | 17 +-- .../action/bulk/TransportBulkAction.java | 48 +++---- .../bulk/TransportSimulateBulkAction.java | 3 +- .../elasticsearch/ingest/IngestService.java | 7 +- .../action/bulk/BulkOperationTests.java | 4 +- ...ActionIndicesThatCannotBeCreatedTests.java | 3 +- .../bulk/TransportBulkActionIngestTests.java | 119 +++++++++++++++--- .../action/bulk/TransportBulkActionTests.java | 28 ++++- .../bulk/TransportBulkActionTookTests.java | 9 +- .../TransportSimulateBulkActionTests.java | 2 +- .../ingest/IngestServiceTests.java | 49 ++++---- .../threadpool/TestThreadPool.java | 77 ------------ 12 files changed, 202 insertions(+), 164 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 1e9b1446850af..412e4f3c875e8 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -74,16 +75,16 @@ final class BulkOperation extends ActionRunnable { private final long startTimeNanos; private final ClusterStateObserver observer; private final Map indicesThatCannotBeCreated; - private final String executorName; + private final Executor executor; private final LongSupplier relativeTimeProvider; private final FailureStoreDocumentConverter failureStoreDocumentConverter; - private IndexNameExpressionResolver indexNameExpressionResolver; - private NodeClient client; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final NodeClient client; BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -97,7 +98,7 @@ final class 
BulkOperation extends ActionRunnable { this( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -115,7 +116,7 @@ final class BulkOperation extends ActionRunnable { BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -137,7 +138,7 @@ final class BulkOperation extends ActionRunnable { this.listener = listener; this.startTimeNanos = startTimeNanos; this.indicesThatCannotBeCreated = indicesThatCannotBeCreated; - this.executorName = executorName; + this.executor = executor; this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; @@ -543,7 +544,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - threadPool.executor(executorName).submit(operation); + executor.execute(operation); } }); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index bf50fd06d056b..3494701cf5b7a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -70,6 +70,7 @@ import java.util.Optional; import java.util.Set; import java.util.SortedMap; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -101,6 +102,9 @@ public class TransportBulkAction extends HandledTransportAction releasingListener = ActionListener.runBefore(listener, releasable::close); - final String executorName = isOnlySystem ? Names.SYSTEM_WRITE : Names.WRITE; - ensureClusterStateThenForkAndExecute(task, bulkRequest, executorName, releasingListener); + final Executor executor = isOnlySystem ? 
systemWriteExecutor : writeExecutor; + ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); } private void ensureClusterStateThenForkAndExecute( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener releasingListener ) { final ClusterState initialState = clusterService.state(); @@ -274,7 +280,7 @@ private void ensureClusterStateThenForkAndExecute( clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } @Override @@ -288,20 +294,20 @@ public void onTimeout(TimeValue timeout) { } }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)); } else { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } } - private void forkAndExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener releasingListener) { - threadPool.executor(executorName).execute(new ActionRunnable<>(releasingListener) { + private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener releasingListener) { + executor.execute(new ActionRunnable<>(releasingListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, releasingListener); + doInternalExecute(task, bulkRequest, executor, releasingListener); } }); } - protected void doInternalExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener listener) { + protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener listener) { final long startTime = relativeTime(); boolean hasIndexRequestsWithPipelines = false; @@ -334,7 +340,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, executorName, metadata, l); + processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } @@ -385,7 +391,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec createMissingIndicesAndIndexData( task, bulkRequest, - executorName, + executor, listener, indicesToAutoCreate, dataStreamsToBeRolledOver, @@ -401,7 +407,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, @@ -411,13 +417,13 @@ protected void createMissingIndicesAndIndexData( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); return; } - Runnable executeBulkRunnable = () -> threadPool.executor(executorName).execute(new ActionRunnable<>(listener) { + Runnable executeBulkRunnable = () -> 
executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { @@ -636,14 +642,14 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { new BulkOperation( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -663,7 +669,7 @@ private long relativeTime() { private void processBulkIndexIngestRequest( Task task, BulkRequest original, - String executorName, + Executor executor, Metadata metadata, ActionListener listener ) { @@ -696,7 +702,7 @@ private void processBulkIndexIngestRequest( ActionRunnable runnable = new ActionRunnable<>(actionListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, actionListener); + doInternalExecute(task, bulkRequest, executor, actionListener); } @Override @@ -713,12 +719,12 @@ public boolean isForceExecution() { if (originalThread == Thread.currentThread()) { runnable.run(); } else { - threadPool.executor(executorName).execute(runnable); + executor.execute(runnable); } } } }, - executorName + executor ); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index f65d0f462fde6..1b3949f3c00ac 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -30,6 +30,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; public class TransportSimulateBulkAction extends TransportBulkAction { @Inject @@ -70,7 +71,7 @@ public TransportSimulateBulkAction( protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToRollover, diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index f406684c50948..be1906ab8d05e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -83,6 +83,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -696,7 +697,7 @@ private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exce * @param onCompletion A callback executed once all documents have been processed. Accepts the thread * that ingestion completed on or an exception in the event that the entire operation * has failed. - * @param executorName Which executor the bulk request should be executed on. + * @param executor Which executor the bulk request should be executed on. 
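+     *                 (the caller resolves this executor up front, rather than looking it up from the thread pool by name)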
*/ public void executeBulkRequest( final int numberOfActionRequests, @@ -706,11 +707,11 @@ public void executeBulkRequest( final TriConsumer onStoreFailure, final BiConsumer onFailure, final BiConsumer onCompletion, - final String executorName + final Executor executor ) { assert numberOfActionRequests > 0 : "numberOfActionRequests must be greater than 0 but was [" + numberOfActionRequests + "]"; - threadPool.executor(executorName).execute(new AbstractRunnable() { + executor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 2226c40b618f4..23395556761f1 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexNotFoundException; @@ -48,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Assume; import org.junit.Before; @@ -843,7 +843,7 @@ private BulkOperation newBulkOperation( return new BulkOperation( null, threadPool, - ThreadPool.Names.SAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, clusterService, request, client, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 3057b00553a22..20d826b11c1e7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.Function; @@ -137,7 +138,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index b97e8303a8eb5..52d50b3a23a0d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -47,9 +47,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import 
org.junit.Before; @@ -57,13 +55,18 @@ import org.mockito.Captor; import org.mockito.MockitoAnnotations; -import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -73,6 +76,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -95,6 +99,9 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); private FeatureService mockFeatureService; + private static final ExecutorService writeExecutor = new NamedDirectExecutorService("write"); + private static final ExecutorService systemWriteExecutor = new NamedDirectExecutorService("system_write"); + /** Services needed by bulk action */ TransportService transportService; ClusterService clusterService; @@ -158,7 +165,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { @@ -186,13 +193,95 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction shutdownNow() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isShutdown() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isTerminated() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Callable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task, T result) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public List> invokeAll(Collection> tasks) { + return null; + } + + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + } + @Before - public void setupAction() throws IOException { + public void setupAction() { // initialize captors, which must be members to use @Capture because of generics threadPool = mock(ThreadPool.class); + when(threadPool.executor(eq(ThreadPool.Names.WRITE))).thenReturn(writeExecutor); + when(threadPool.executor(eq(ThreadPool.Names.SYSTEM_WRITE))).thenReturn(systemWriteExecutor); MockitoAnnotations.openMocks(this); // setup services that will be called by action - transportService = 
MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); + transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); clusterService = mock(ClusterService.class); localIngest = true; // setup nodes for local and remote @@ -312,7 +401,7 @@ public void testIngestLocal() throws Exception { redirectHandler.capture(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -360,7 +449,7 @@ public void testSingleItemBulkActionIngestLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -408,7 +497,7 @@ public void testIngestSystemLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.SYSTEM_WRITE) + same(systemWriteExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -567,7 +656,7 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest1.getPipeline(), "default_pipeline"); assertEquals(indexRequest2.getPipeline(), "default_pipeline"); @@ -617,7 +706,7 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertFalse(action.indexCreated); // still no index yet, the ingest node failed. 
@@ -713,7 +802,7 @@ public void testFindDefaultPipelineFromTemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -753,7 +842,7 @@ public void testFindDefaultPipelineFromV2TemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -782,7 +871,7 @@ public void testIngestCallbackExceptionHandled() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); indexRequest1.autoGenerateId(); completionHandler.getValue().accept(Thread.currentThread(), null); @@ -821,7 +910,7 @@ private void validateDefaultPipeline(IndexRequest indexRequest) { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(null, exception); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 1a16d9083df55..960397033f602 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -59,12 +59,14 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.bulk.TransportBulkAction.prohibitCustomRoutingOnDataStream; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamServiceTests.createDataStream; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.junit.Assume.assumeThat; import static org.mockito.ArgumentMatchers.any; @@ -321,31 +323,45 @@ public void testOnlySystem() { assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(mixed), indicesLookup, systemIndices)); } - public void testRejectCoordination() throws Exception { + private void blockWriteThreadPool(CountDownLatch blockingLatch) { + assertThat(blockingLatch.getCount(), greaterThan(0L)); + final var executor = threadPool.executor(ThreadPool.Names.WRITE); + // Add tasks repeatedly until we get an EsRejectedExecutionException which indicates that the threadpool and its queue are full. 
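+        // Each queued task parks on blockingLatch, so the pool and its queue stay saturated until the test counts the latch down.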
+ expectThrows(EsRejectedExecutionException.class, () -> { + // noinspection InfiniteLoopStatement + while (true) { + executor.execute(() -> safeAwait(blockingLatch)); + } + }); + } + + public void testRejectCoordination() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); + final var blockingLatch = new CountDownLatch(1); try { - threadPool.startForcingRejections(); + blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } - public void testRejectionAfterCreateIndexIsPropagated() throws Exception { + public void testRejectionAfterCreateIndexIsPropagated() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); bulkAction.failIndexCreation = randomBoolean(); + final var blockingLatch = new CountDownLatch(1); try { - bulkAction.beforeIndexCreation = threadPool::startForcingRejections; + bulkAction.beforeIndexCreation = () -> blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); assertTrue(bulkAction.indexCreated); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index cb9bdd1f3a827..09513351652b8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -50,6 +50,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Map; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; @@ -140,12 +141,12 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { expected.set(1000000); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } else { @@ -165,13 +166,13 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { long elapsed = spinForAtLeastOneMillisecond(); expected.set(elapsed); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 2657bdef8c09d..fc9e9f05542c9 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -200,7 +200,7 @@ public void onFailure(Exception e) { bulkAction.createMissingIndicesAndIndexData( task, bulkRequest, - randomAlphaOfLength(10), + r -> fail("executor is unused"), listener, indicesToAutoCreate, dataStreamsToRollover, diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 9582a6e76d539..084eb94852524 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -219,7 +218,7 @@ public void testExecuteIndexPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1127,7 +1126,7 @@ public String getType() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1172,7 +1171,7 @@ public void testExecuteBulkPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, times(1)).accept( argThat(item -> item == 2), @@ -1249,7 +1248,7 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(wrappedObserverWasUsed.get(), equalTo(2)); assertThat(parsedValueWasUsed.get(), equalTo(2)); @@ -1284,7 +1283,7 @@ public void testExecuteSuccess() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1326,7 +1325,7 @@ public void testDynamicTemplates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); latch.await(); assertThat(indexRequest.getDynamicTemplates(), equalTo(Map.of("foo", "bar", "foo.bar", "baz"))); @@ -1356,7 +1355,7 @@ public void testExecuteEmptyPipeline() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1419,7 +1418,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(any(), 
any()); verify(failureHandler, never()).accept(any(), any()); @@ -1477,7 +1476,7 @@ public void testExecuteFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1535,7 +1534,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1587,7 +1586,7 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1650,7 +1649,7 @@ public void testBulkRequestExecutionWithFailures() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> e.getCause().equals(error))); @@ -1704,7 +1703,7 @@ public void testExecuteFailureRedirection() throws Exception { redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1761,7 +1760,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1827,7 +1826,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { requestItemRedirectHandler, requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemRedirectHandler, times(numIndexRequests)).apply(anyInt(), anyString(), argThat(e -> e.getCause().equals(error))); @@ -1888,7 +1887,7 @@ public void testBulkRequestExecution() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, never()).accept(any(), any()); @@ -2003,7 +2002,7 @@ public String execute() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); { @@ -2083,7 +2082,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting 
failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.pipelineStats().size(), equalTo(2)); @@ -2109,7 +2108,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.pipelineStats().size(), equalTo(2)); @@ -2140,7 +2139,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.pipelineStats().size(), equalTo(2)); @@ -2172,7 +2171,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.pipelineStats().size(), equalTo(2)); @@ -2269,7 +2268,7 @@ public String getDescription() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -2359,7 +2358,7 @@ public void testCBORParsing() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); } @@ -2439,7 +2438,7 @@ public void testSetsRawTimestamp() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(indexRequest1.getRawTimestamp(), nullValue()); diff --git a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java index ce8e3a2574f3e..e2fa31c31a46f 100644 --- a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java @@ -9,23 +9,14 @@ package org.elasticsearch.threadpool; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.node.Node; import org.elasticsearch.telemetry.metric.MeterRegistry; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TestThreadPool extends ThreadPool implements Releasable { - private final CountDownLatch blockingLatch = new CountDownLatch(1); - private volatile boolean returnRejectingExecutor = false; - private volatile ThreadPoolExecutor rejectingExecutor; - public TestThreadPool(String name, ExecutorBuilder... 
customBuilders) { this(name, Settings.EMPTY, customBuilders); } @@ -34,74 +25,6 @@ public TestThreadPool(String name, Settings settings, ExecutorBuilder... cust super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), MeterRegistry.NOOP, customBuilders); } - @Override - public ExecutorService executor(String name) { - if (returnRejectingExecutor) { - return rejectingExecutor; - } else { - return super.executor(name); - } - } - - public void startForcingRejections() { - if (rejectingExecutor == null) { - createRejectingExecutor(); - } - returnRejectingExecutor = true; - } - - public void stopForcingRejections() { - returnRejectingExecutor = false; - } - - @Override - public void shutdown() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdown(); - } - super.shutdown(); - } - - @Override - public void shutdownNow() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdownNow(); - } - super.shutdownNow(); - } - - private synchronized void createRejectingExecutor() { - if (rejectingExecutor != null) { - return; - } - ThreadFactory factory = EsExecutors.daemonThreadFactory("reject_thread"); - rejectingExecutor = EsExecutors.newFixed( - "rejecting", - 1, - 0, - factory, - getThreadContext(), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - - CountDownLatch startedLatch = new CountDownLatch(1); - rejectingExecutor.execute(() -> { - try { - startedLatch.countDown(); - blockingLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); - try { - startedLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - @Override public void close() { ThreadPool.terminate(this, 10, TimeUnit.SECONDS); From d99323e827c1f0e4a51b4b2bbd8feec69decc50c Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 9 Apr 2024 11:34:58 +0300 Subject: [PATCH 188/264] Add retries in concurrent downsampling action (#107213) --- .../xpack/downsample/DownsampleActionSingleNodeTests.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 2057518307fc0..d23f1e4b89a8c 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -553,7 +553,10 @@ public void onFailure(Exception e) { fail("downsample index has not been created"); } }); - downsample(sourceIndex, downsampleIndex, config); + + // Downsample with retries, in case the downsampled index is not ready. 
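+        // assertBusy re-runs the downsample call on failure, for up to two minutes, until it stops throwing.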
+ assertBusy(() -> downsample(sourceIndex, downsampleIndex, config), 120, TimeUnit.SECONDS); + // We must wait until the in-progress downsample ends, otherwise data will not be cleaned up assertBusy(() -> assertTrue("In progress downsample did not complete", downsampleListener.success), 60, TimeUnit.SECONDS); } From 2588c72a5218756aaaacb81ec9adfb625fc76921 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 9 Apr 2024 10:41:34 +0200 Subject: [PATCH 189/264] ES|QL: Add unit tests and docs for DATE_TRUNC() (#107145) --- .../functions/date-time-functions.asciidoc | 2 +- .../functions/description/date_trunc.asciidoc | 5 + .../{ => examples}/date_trunc.asciidoc | 29 +--- .../esql/functions/layout/date_trunc.asciidoc | 15 ++ .../functions/parameters/date_trunc.asciidoc | 9 ++ .../esql/functions/signature/date_trunc.svg | 1 + .../esql/functions/types/date_trunc.asciidoc | 10 ++ .../src/main/resources/meta.csv-spec | 4 +- .../esql/expression/function/Example.java | 6 + .../function/scalar/date/DateTrunc.java | 23 ++- .../xpack/esql/analysis/AnalyzerTests.java | 16 +- .../function/AbstractFunctionTestCase.java | 14 +- .../function/scalar/date/DateTruncTests.java | 152 +++++++++++------- 13 files changed, 187 insertions(+), 99 deletions(-) create mode 100644 docs/reference/esql/functions/description/date_trunc.asciidoc rename docs/reference/esql/functions/{ => examples}/date_trunc.asciidoc (68%) create mode 100644 docs/reference/esql/functions/layout/date_trunc.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_trunc.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_trunc.svg create mode 100644 docs/reference/esql/functions/types/date_trunc.asciidoc diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index e9d6628c63894..c1cd36e376a1c 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -22,5 +22,5 @@ include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] -include::date_trunc.asciidoc[] +include::layout/date_trunc.asciidoc[] include::now.asciidoc[] diff --git a/docs/reference/esql/functions/description/date_trunc.asciidoc b/docs/reference/esql/functions/description/date_trunc.asciidoc new file mode 100644 index 0000000000000..1fb874e3bd9cd --- /dev/null +++ b/docs/reference/esql/functions/description/date_trunc.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds down a date to the closest interval. diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/examples/date_trunc.asciidoc similarity index 68% rename from docs/reference/esql/functions/date_trunc.asciidoc rename to docs/reference/esql/functions/examples/date_trunc.asciidoc index 4aa228dc14e65..d7cece9aff58b 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/examples/date_trunc.asciidoc @@ -1,26 +1,4 @@ -[discrete] -[[esql-date_trunc]] -=== `DATE_TRUNC` - -*Syntax* - -[source,esql] ----- -DATE_TRUNC(interval, date) ----- - -*Parameters* - -`interval`:: -Interval, expressed using the <>. If `null`, the function returns `null`. - -`date`:: -Date expression. If `null`, the function returns `null`. 
- -*Description* - -Rounds down a date to the closest interval. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* @@ -32,10 +10,8 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] |=== - Combine `DATE_TRUNC` with <> to create date histograms. For example, the number of hires per year: - [source.merge.styled,esql] ---- include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] @@ -44,9 +20,7 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] |=== - Or an hourly error rate: - [source.merge.styled,esql] ---- include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] @@ -55,3 +29,4 @@ include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] |=== include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] |=== + diff --git a/docs/reference/esql/functions/layout/date_trunc.asciidoc b/docs/reference/esql/functions/layout/date_trunc.asciidoc new file mode 100644 index 0000000000000..0bd9ce4b4dbe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_trunc.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_trunc]] +=== `DATE_TRUNC` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_trunc.svg[Embedded,opts=inline] + +include::../parameters/date_trunc.asciidoc[] +include::../description/date_trunc.asciidoc[] +include::../types/date_trunc.asciidoc[] +include::../examples/date_trunc.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_trunc.asciidoc b/docs/reference/esql/functions/parameters/date_trunc.asciidoc new file mode 100644 index 0000000000000..19f7cb6cd7c74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_trunc.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`interval`:: +Interval; expressed using the timespan literal syntax. + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/signature/date_trunc.svg b/docs/reference/esql/functions/signature/date_trunc.svg new file mode 100644 index 0000000000000..c82cd04ed5c88 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_trunc.svg @@ -0,0 +1 @@ +DATE_TRUNC(interval,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc new file mode 100644 index 0000000000000..8df45cfef54a8 --- /dev/null +++ b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +interval | date | result +date_period | datetime | datetime +time_duration | datetime | datetime +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 33b61c95ed0ed..d344b50c0364f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -22,7 +22,7 @@ synopsis:keyword "long date_extract(datePart:keyword|text, date:date)" "keyword date_format(?dateFormat:keyword|text, date:date)" "date date_parse(?datePattern:keyword|text, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" +"date date_trunc(interval:date_period|time_duration, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" @@ -132,7 +132,7 @@ date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era., Date expression] date_format |[dateFormat, date] |["keyword|text", date] |[A valid date pattern, Date expression] date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[A valid date pattern, A string representing a date] -date_trunc |[interval, date] |[keyword, date] |[Interval; expressed using the timespan literal syntax., Date expression] +date_trunc |[interval, date] |["date_period|time_duration", date] |[Interval; expressed using the timespan literal syntax., Date expression] e |null |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] floor |number |"double|integer|long|unsigned_long" |[""] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java index 0cee9d2c53cde..7c9a788eed36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -18,6 +18,12 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface Example { + + /** + * The description that will appear before the example + */ + String description() default ""; + /** * The test file that contains the example. 
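     * (a {@code .csv-spec} resource referenced by base name, e.g. {@code file = "date"} selects {@code date.csv-spec})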
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0f35b95a287ad..39ad0351b199f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -34,14 +35,26 @@ public class DateTrunc extends BinaryDateTimeFunction implements EvaluatorMapper { - @FunctionInfo(returnType = "date", description = "Rounds down a date to the closest interval.") + @FunctionInfo( + returnType = "date", + description = "Rounds down a date to the closest interval.", + examples = { + @Example(file = "date", tag = "docsDateTrunc"), + @Example( + description = "Combine `DATE_TRUNC` with <> to create date histograms. For\n" + + "example, the number of hires per year:", + file = "date", + tag = "docsDateTruncHistogram" + ), + @Example(description = "Or an hourly error rate:", file = "conditional", tag = "docsCaseHourlyErrorRate") } + ) public DateTrunc( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial @Param( name = "interval", - type = { "keyword" }, + type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, @Param(name = "date", type = { "date" }, description = "Date expression") Expression field @@ -55,8 +68,8 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - return isDate(timestampField(), sourceText(), FIRST).and( - isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration") + return isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), FIRST, "dateperiod", "timeduration").and( + isDate(timestampField(), sourceText(), SECOND) ); } @@ -105,7 +118,7 @@ private static Rounding.Prepared createRounding(final Period period, final ZoneI long periods = period.getUnits().stream().filter(unit -> period.get(unit) != 0).count(); if (periods != 1) { - throw new IllegalArgumentException("Time interval is not supported"); + throw new IllegalArgumentException("Time interval with multiple periods is not supported"); } final Rounding.Builder rounding; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index aedc789620480..f4ecf38915a29 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1118,36 +1118,36 @@ public void testDateParseOnIntPattern() { public void testDateTruncOnInt() { verifyUnsupported(""" from test - | eval date_trunc("1M", int) - """, "first argument of [date_trunc(\"1M\", int)] must be [datetime], found value [int] type [integer]"); + | eval date_trunc(1 month, int) + """, "second argument of [date_trunc(1 month, int)] must be [datetime], found value [int] type [integer]"); } public void testDateTruncOnFloat() { verifyUnsupported(""" from test - | eval date_trunc("1M", float) - """, "first argument of [date_trunc(\"1M\", float)] must be [datetime], found value [float] type [double]"); + | eval date_trunc(1 month, float) + """, "second argument of [date_trunc(1 month, float)] must be [datetime], found value [float] type [double]"); } public void testDateTruncOnText() { verifyUnsupported(""" from test - | eval date_trunc("1M", keyword) - """, "first argument of [date_trunc(\"1M\", keyword)] must be [datetime], found value [keyword] type [keyword]"); + | eval date_trunc(1 month, keyword) + """, "second argument of [date_trunc(1 month, keyword)] must be [datetime], found value [keyword] type [keyword]"); } public void testDateTruncWithNumericInterval() { verifyUnsupported(""" from test | eval date_trunc(1, date) - """, "second argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); + """, "first argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); } public void testDateTruncWithDateInterval() { verifyUnsupported(""" from test | eval date_trunc(date, date) - """, "second argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]"); + """, "first argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]"); } // check field declaration is validated even across duplicated declarations diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b97622f28520c..0772b03bf3210 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -618,7 +618,7 @@ public static void testFunctionInfo() { for (Map.Entry, DataType> entry : signatures.entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { - typesFromSignature.get(i).add(types.get(i).esType()); + typesFromSignature.get(i).add(signatureType(types.get(i))); } returnFromSignature.add(entry.getValue().esType()); } @@ -637,6 +637,10 @@ public static void testFunctionInfo() { } + private static String signatureType(DataType type) { + return type.esType() != null ? type.esType() : type.typeName(); + } + /** * Adds cases with {@code null} and asserts that the result is {@code null}. *

    @@ -894,6 +898,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List 0) { + builder.append(example.description()); + builder.append("\n"); + } builder.append(""" [source.merge.styled,esql] ---- diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 4f897c47d73b8..98fbff6a816c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -7,28 +7,56 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.Rounding; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.DateEsField; -import org.elasticsearch.xpack.ql.type.EsField; import java.time.Duration; import java.time.Instant; import java.time.Period; -import java.util.Collections; -import java.util.Map; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.createRounding; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.process; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateTruncTests extends AbstractFunctionTestCase { -public class DateTruncTests extends ESTestCase { + public DateTruncTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + long ts = toMillis("2023-02-17T10:25:33.38Z"); + List suppliers = List.of( + ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z"), + ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z"), + ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z"), + ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z"), + // 7 days period should return weekly rounding + ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z"), + // 3 months period should return quarterly + ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z"), + ofDuration(Duration.ofHours(1), ts, "2023-02-17T10:00:00.00Z"), + ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z"), + ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z"), + ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z"), + ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z"), + ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z"), + randomSecond() + ); + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } public void testCreateRoundingDuration() { Rounding.Prepared rounding; @@ -71,7 +99,7 @@ public void testCreateRoundingPeriod() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); e = expectThrows(IllegalArgumentException.class, () -> createRounding(Period.of(0, 1, 1))); - assertThat(e.getMessage(), containsString("Time interval is not supported")); + assertThat(e.getMessage(), containsString("Time interval with multiple periods is not supported")); rounding = createRounding(Period.ofDays(1)); assertEquals(1, rounding.roundingSize(Rounding.DateTimeUnit.DAY_OF_MONTH), 0d); @@ -103,25 +131,6 @@ public void testCreateRoundingNullInterval() { public void testDateTruncFunction() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - assertEquals(toMillis("2023-02-17T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(1)))); - assertEquals(toMillis("2023-02-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(1)))); - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofYears(1)))); - - assertEquals(toMillis("2023-02-12T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(10)))); - // 7 days period should return weekly rounding - assertEquals(toMillis("2023-02-13T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(7)))); - // 3 months period should return quarterly - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(3)))); - - assertEquals(toMillis("2023-02-17T10:00:00.00Z"), process(ts, createRounding(Duration.ofHours(1)))); - assertEquals(toMillis("2023-02-17T10:25:00.00Z"), process(ts, createRounding(Duration.ofMinutes(1)))); - assertEquals(toMillis("2023-02-17T10:25:33.00Z"), process(ts, createRounding(Duration.ofSeconds(1)))); - - assertEquals(toMillis("2023-02-17T09:00:00.00Z"), process(ts, createRounding(Duration.ofHours(3)))); - assertEquals(toMillis("2023-02-17T10:15:00.00Z"), process(ts, createRounding(Duration.ofMinutes(15)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Period.ofDays(-1)))); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); @@ -129,36 +138,71 @@ public void testDateTruncFunction() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); } - private static long toMillis(String timestamp) { - return Instant.parse(timestamp).toEpochMilli(); + private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.DATE_PERIOD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, EsqlDataTypes.DATE_PERIOD, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); + } + + private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), + () -> new 
TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(duration, EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); } - public void testSerialization() { - var dateTrunc = new DateTrunc(Source.EMPTY, randomDateIntervalLiteral(), randomDateField()); - SerializationTestUtils.assertSerialization(dateTrunc); + private static TestCaseSupplier randomSecond() { + return new TestCaseSupplier("random second", List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), () -> { + String dateFragment = randomIntBetween(2000, 2050) + + "-" + + pad(randomIntBetween(1, 12)) + + "-" + + pad(randomIntBetween(1, 28)) + + "T" + + pad(randomIntBetween(0, 23)) + + ":" + + pad(randomIntBetween(0, 59)) + + ":" + + pad(randomIntBetween(0, 59)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Duration.ofSeconds(1), EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(dateFragment + ".00Z")) + ); + }); } - private static FieldAttribute randomDateField() { - String fieldName = randomAlphaOfLength(randomIntBetween(1, 25)); - String dateName = randomAlphaOfLength(randomIntBetween(1, 25)); - boolean hasDocValues = randomBoolean(); - if (randomBoolean()) { - return new FieldAttribute(Source.EMPTY, fieldName, new EsField(dateName, DataTypes.DATETIME, Map.of(), hasDocValues)); - } else { - return new FieldAttribute(Source.EMPTY, fieldName, DateEsField.dateEsField(dateName, Collections.emptyMap(), hasDocValues)); - } + private static String pad(int i) { + return i > 9 ? "" + i : "0" + i; + } + + private static long toMillis(String timestamp) { + return Instant.parse(timestamp).toEpochMilli(); } - private static Literal randomDateIntervalLiteral() { - Duration duration = switch (randomInt(5)) { - case 0 -> Duration.ofNanos(randomIntBetween(1, 100000)); - case 1 -> Duration.ofMillis(randomIntBetween(1, 1000)); - case 2 -> Duration.ofSeconds(randomIntBetween(1, 1000)); - case 3 -> Duration.ofMinutes(randomIntBetween(1, 1000)); - case 4 -> Duration.ofHours(randomIntBetween(1, 100)); - case 5 -> Duration.ofDays(randomIntBetween(1, 60)); - default -> throw new AssertionError(); - }; - return new Literal(Source.EMPTY, duration, EsqlDataTypes.TIME_DURATION); + @Override + protected Expression build(Source source, List args) { + return new DateTrunc(source, args.get(0), args.get(1)); } } From bdc98737862ed5da0fb7a9a92af386dc583b48f0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Apr 2024 10:28:08 +0100 Subject: [PATCH 190/264] Remove unused `ThreadPool.Names#SAME` (#107249) `SAME` is a distinguished threadpool name that callers could use to obtain a special `ExecutorService` that runs tasks immediately, directly on the calling thread. In fact there are no callers that use this name any more, so we can remove it and all the associated special handling. 
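As an aside, a minimal sketch of the migration path for a caller (the call site here is hypothetical; `EsExecutors.DIRECT_EXECUTOR_SERVICE` is the replacement the diffs below switch to, and like "same" it runs each task immediately on the calling thread with no hand-off to a pool thread):

    import java.util.concurrent.ExecutorService;

    import org.elasticsearch.common.util.concurrent.EsExecutors;

    class DirectExecutorMigration {
        // Before this change a caller could write:
        //   ExecutorService executor = threadPool.executor(ThreadPool.Names.SAME);
        // Now the direct executor service is requested explicitly instead of
        // being looked up by the distinguished "same" name.
        static void runInline(Runnable task) {
            ExecutorService executor = EsExecutors.DIRECT_EXECUTOR_SERVICE;
            executor.execute(task); // runs synchronously on this thread
        }
    }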
Relates #106279 --- .../elasticsearch/threadpool/ThreadPool.java | 21 +++++++------------ .../threadpool/ESThreadPoolTestCase.java | 7 +++---- .../UpdateThreadPoolSettingsTests.java | 10 ++------- .../concurrent/DeterministicTaskQueue.java | 2 +- .../DeterministicTaskQueueTests.java | 16 -------------- .../MlDailyMaintenanceServiceIT.java | 3 --- .../MlInitializationServiceIT.java | 4 +--- 7 files changed, 14 insertions(+), 49 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 507eff05780b8..9679bc88319d0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.node.Node; import org.elasticsearch.node.ReportingService; import org.elasticsearch.telemetry.metric.Instrument; @@ -64,7 +65,6 @@ public class ThreadPool implements ReportingService, Scheduler { private static final Logger logger = LogManager.getLogger(ThreadPool.class); public static class Names { - public static final String SAME = "same"; public static final String GENERIC = "generic"; public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String GET = "get"; @@ -99,9 +99,13 @@ public static class Names { public static final String THREAD_POOL_METRIC_NAME_REJECTED = ".threads.rejected.total"; public enum ThreadPoolType { + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 DIRECT("direct"), FIXED("fixed"), - FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), // TODO: remove in 9.0 + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 + FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), SCALING("scaling"); private final String type; @@ -127,7 +131,6 @@ public static ThreadPoolType fromType(String type) { } public static final Map THREAD_POOL_TYPES = Map.ofEntries( - entry(Names.SAME, ThreadPoolType.DIRECT), entry(Names.GENERIC, ThreadPoolType.SCALING), entry(Names.GET, ThreadPoolType.FIXED), entry(Names.ANALYZE, ThreadPoolType.FIXED), @@ -335,16 +338,10 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex executors.put(entry.getKey(), executorHolder); } - executors.put(Names.SAME, new ExecutorHolder(EsExecutors.DIRECT_EXECUTOR_SERVICE, new Info(Names.SAME, ThreadPoolType.DIRECT))); this.executors = Map.copyOf(executors); this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v))); this.instruments = instruments; - final List infos = executors.values() - .stream() - .filter(holder -> holder.info.getName().equals("same") == false) - .map(holder -> holder.info) - .toList(); - this.threadPoolInfo = new ThreadPoolInfo(infos); + this.threadPoolInfo = new ThreadPoolInfo(executors.values().stream().map(holder -> holder.info).toList()); this.scheduler = Scheduler.initScheduler(settings, "scheduler"); this.slowSchedulerWarnThresholdNanos = SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING.get(settings).nanos(); this.cachedTimeThread = new CachedTimeThread( @@ -481,10 +478,6 @@ public ThreadPoolStats stats() { List stats = new ArrayList<>(); for (ExecutorHolder holder : executors.values()) { final String name = holder.info.getName(); - // no need to have info on "same" 
thread pool - if ("same".equals(name)) { - continue; - } int threads = -1; int queue = -1; int active = -1; diff --git a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java index 40115e1402495..4f7d900f7cdb8 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java @@ -20,8 +20,7 @@ protected final ThreadPool.Info info(final ThreadPool threadPool, final String n return info; } } - assert "same".equals(name); - return null; + return fail(null, "unknown threadpool name: " + name); } protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) { @@ -30,10 +29,10 @@ protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final S return stats; } } - throw new IllegalArgumentException(name); + return fail(null, "unknown threadpool name: " + name); } - protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) throws InterruptedException { + protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) { if (threadPool != null) { terminate(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 5644e0b613651..b68f3ef76bbac 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -29,7 +29,7 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { - public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { + public void testCorrectThreadPoolTypePermittedInSettings() { String threadPoolName = randomThreadPoolName(); ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; @@ -41,13 +41,7 @@ public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedExc .build(), MeterRegistry.NOOP ); - ThreadPool.Info info = info(threadPool, threadPoolName); - if (ThreadPool.Names.SAME.equals(threadPoolName)) { - assertNull(info); // we don't report on the "same" thread pool - } else { - // otherwise check we have the expected type - assertEquals(info.getThreadPoolType(), correctThreadPoolType); - } + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), correctThreadPoolType); } finally { terminateThreadPoolIfNeeded(threadPool); } diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java index e89a6c8a84bf7..1fac5a9917807 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java @@ -379,7 +379,7 @@ public ExecutorService generic() { @Override public ExecutorService executor(String name) { - return Names.SAME.equals(name) ? 
EsExecutors.DIRECT_EXECUTOR_SERVICE : forkingExecutor; + return forkingExecutor; } @Override diff --git a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java index f4677dc603e64..0e79dfa6e1e79 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java @@ -443,20 +443,4 @@ public void testThreadPoolSchedulesPeriodicFutureTasks() { assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); } - public void testSameExecutor() { - final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); - final ThreadPool threadPool = taskQueue.getThreadPool(); - final AtomicBoolean executed = new AtomicBoolean(false); - final AtomicBoolean executedNested = new AtomicBoolean(false); - threadPool.generic().execute(() -> { - final var executor = threadPool.executor(ThreadPool.Names.SAME); - assertSame(EsExecutors.DIRECT_EXECUTOR_SERVICE, executor); - executor.execute(() -> assertTrue(executedNested.compareAndSet(false, true))); - assertThat(executedNested.get(), is(true)); - assertTrue(executed.compareAndSet(false, true)); - }); - taskQueue.runAllRunnableTasks(); - assertThat(executed.get(), is(true)); - } - } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java index 20ca6d8847d79..4fe3ed61114c3 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.threadpool.ThreadPool; @@ -35,7 +34,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase { @@ -46,7 +44,6 @@ public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase public void setUpMocks() { jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); } public void testTriggerDeleteJobsInStateDeletingWithoutDeletionTask() throws InterruptedException { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java index 0a7cee96df145..30f84a97bcfb0 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java @@ -39,13 +39,11 @@ public class MlInitializationServiceIT extends MlNativeAutodetectIntegTestCase { - private ThreadPool threadPool; private MlInitializationService mlInitializationService; @Before public void setUpMocks() { - threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + final var threadPool = mock(ThreadPool.class); when(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); MlDailyMaintenanceService mlDailyMaintenanceService = mock(MlDailyMaintenanceService.class); ClusterService clusterService = mock(ClusterService.class); From 51aa92090dd01139f8071df2d8c175e50a2d9c0d Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 9 Apr 2024 12:04:43 +0200 Subject: [PATCH 191/264] ES|QL: More deterministic tests (#107248) --- .../elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index ab288de4ad27d..5aa48234cb11a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -478,7 +478,8 @@ public void testWarningHeadersOnFailedConversions() throws IOException { bulkLoadTestData(count); Request request = prepareRequest(SYNC); - var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; + var query = fromIndex() + + " | sort integer asc | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); @@ -493,7 +494,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { int expectedWarnings = Math.min(count / 2, 20); var warnings = response.getWarnings(); assertThat(warnings.size(), is(1 + expectedWarnings)); - var firstHeader = "Line 1:36: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + var firstHeader = "Line 1:55: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + "treating result as null. 
Only first 20 failures recorded."; assertThat(warnings.get(0), containsString(firstHeader)); for (int i = 1; i <= expectedWarnings; i++) { From c1ef120fe211544c96a79b1540939780764460c7 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 9 Apr 2024 14:15:54 +0200 Subject: [PATCH 192/264] [Docs][ESQL] Make functions reference more digestible (#107258) * [Docs][ESQL] Make functions reference more digestible * Remove redundant links --- .../esql/esql-functions-operators.asciidoc | 53 +++++++++++++++---- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc index a1ad512fbe512..ddc077f3b8ff8 100644 --- a/docs/reference/esql/esql-functions-operators.asciidoc +++ b/docs/reference/esql/esql-functions-operators.asciidoc @@ -1,40 +1,71 @@ [[esql-functions-operators]] === {esql} functions and operators - ++++ Functions and operators ++++ {esql} provides a comprehensive set of functions and operators for working with data. -The functions are divided into the following categories: +The reference documentation is divided into the following categories: [[esql-functions]] -<>:: +==== Functions overview + +.*Aggregate functions* +[%collapsible] +==== include::functions/aggregation-functions.asciidoc[tag=agg_list] +==== -<>:: +.*Math functions* +[%collapsible] +==== include::functions/math-functions.asciidoc[tag=math_list] +==== -<>:: +.*String functions* +[%collapsible] +==== include::functions/string-functions.asciidoc[tag=string_list] +==== -<>:: +.*Date and time functions* +[%collapsible] +==== include::functions/date-time-functions.asciidoc[tag=date_list] +==== -<>:: +.*Spatial functions* +[%collapsible] +==== include::functions/spatial-functions.asciidoc[tag=spatial_list] +==== -<>:: +.*Type conversion functions* +[%collapsible] +==== include::functions/type-conversion-functions.asciidoc[tag=type_list] +==== -<>:: +.*Conditional functions and expressions* +[%collapsible] +==== include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list] +==== -<>:: +.*Multi value functions* +[%collapsible] +==== include::functions/mv-functions.asciidoc[tag=mv_list] +==== + +[[esql-operators-overview]] +==== Operators overview -<>:: +.*Operators* +[%collapsible] +==== include::functions/operators.asciidoc[tag=op_list] +==== include::functions/aggregation-functions.asciidoc[] include::functions/math-functions.asciidoc[] From 3dccc66c2cd79418c8473076ef3721ea3b8048eb Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 9 Apr 2024 14:16:24 +0200 Subject: [PATCH 193/264] [DOCS][ESQL] Rename _Learning ESQL_ to _ESQL reference_ (#107259) --- docs/reference/esql/esql-language.asciidoc | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index e4c873457b21b..77f5e79753fdd 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,11 +1,10 @@ [[esql-language]] -== Learning {esql} - +== {esql} reference ++++ -Learning {esql} +{esql} reference ++++ -Detailed information about the {esql} language: +Detailed reference documentation for the {esql} language: * <> * <> From 73e8984164b7e25d2360dbe7f2eea4057b7513e1 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Tue, 9 Apr 2024 08:27:47 -0400 Subject: [PATCH 194/264] Add documentation to 
thread pool and executor code (#106208) --- .../s3/RepositoryCredentialsTests.java | 2 +- .../repositories/s3/S3RepositoryTests.java | 2 +- .../repositories/url/URLRepositoryTests.java | 2 +- .../common/util/concurrent/EsExecutors.java | 3 ++ .../blobstore/BlobStoreRepository.java | 8 ++--- .../threadpool/FixedExecutorBuilder.java | 3 ++ .../threadpool/ScalingExecutorBuilder.java | 4 +++ .../elasticsearch/threadpool/ThreadPool.java | 36 +++++++++++++++++++ .../BlobStoreRepositoryRestoreTests.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 2 +- ...archableSnapshotsPrewarmingIntegTests.java | 4 +-- ...SnapshotRecoveryStateIntegrationTests.java | 2 +- .../SearchableSnapshotDirectoryTests.java | 2 +- 13 files changed, 59 insertions(+), 13 deletions(-) diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index cf3bc21526bf6..13e582598a2d2 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -267,7 +267,7 @@ protected S3Repository createRepository( ) { return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, s3RepositoriesMetrics) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 50470ec499ef6..ff61504d6c525 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -132,7 +132,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { S3RepositoriesMetrics.NOOP ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 00abf1e77fd57..a02bff59988d8 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -43,7 +43,7 @@ private URLRepository createRepository(Settings baseSettings, RepositoryMetadata mock(URLHttpClient.Factory.class) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 5fcb4684d3f8d..14c1d1e9ef6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -33,6 +33,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +/** + * A collection of static methods to help create different ES Executor types. + */ public class EsExecutors { // although the available processors may technically change, for node sizing we use the number available at launch diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 41e849b4d2ebd..5a33a958646df 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -680,7 +680,7 @@ protected BlobStore getBlobStore() { * maintains single lazy instance of {@link BlobContainer} */ protected BlobContainer blobContainer() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (lifecycle.started() == false) { throw notStartedException(); @@ -705,7 +705,7 @@ protected BlobContainer blobContainer() { * Public for testing. */ public BlobStore blobStore() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); BlobStore store = blobStore.get(); if (store == null) { @@ -1994,7 +1994,7 @@ public long getRestoreThrottleTimeInNanos() { return restoreRateLimitingTimeInNanos.count(); } - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // The Stateless plugin adds custom thread pools for object store operations assert ThreadPool.assertCurrentThreadPool( ThreadPool.Names.SNAPSHOT, @@ -3539,7 +3539,7 @@ public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotI @Override public void verify(String seed, DiscoveryNode localNode) { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (isReadOnly()) { try { latestIndexBlobId(); diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 9668228ac0ec3..544b085a7006d 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -24,6 +24,9 @@ /** * A builder for fixed executors. + * + * Builds an Executor with a static number of threads, as opposed to {@link ScalingExecutorBuilder} that dynamically scales the number of + * threads in the pool up and down based on request load. */ public final class FixedExecutorBuilder extends ExecutorBuilder { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java index 07504bc5f9d2e..29a7d5df08b7b 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java @@ -24,6 +24,10 @@ /** * A builder for scaling executors. + * + * The {@link #build} method will instantiate a java {@link ExecutorService} thread pool that starts with the specified minimum number of + * threads and then scales up to the specified max number of threads as needed for excess work, scaling back when the burst of activity + * stops. 
As opposed to the {@link FixedExecutorBuilder} that keeps a fixed number of threads alive. */ public final class ScalingExecutorBuilder extends ExecutorBuilder { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 9679bc88319d0..ceda140827527 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -60,12 +60,28 @@ import static java.util.Map.entry; import static org.elasticsearch.core.Strings.format; +/** + * Manages all the Java thread pools we create. {@link Names} contains a list of the thread pools, but plugins can dynamically add more + * thread pools to instantiate. + */ public class ThreadPool implements ReportingService, Scheduler { private static final Logger logger = LogManager.getLogger(ThreadPool.class); + /** + * List of names that identify Java thread pools that are created in {@link ThreadPool#ThreadPool}. + */ public static class Names { + /** + * All the tasks that do not relate to the purpose of one of the other thread pools should use this thread pool. Try to pick one of + * the other more specific thread pools where possible. + */ public static final String GENERIC = "generic"; + /** + * Important management tasks that keep the cluster from falling apart. + * This thread pool ensures cluster coordination tasks do not get blocked by less critical tasks and can continue to make progress. + * This thread pool also defaults to a single thread, reducing contention on the Coordinator mutex. + */ public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String GET = "get"; public static final String ANALYZE = "analyze"; @@ -75,6 +91,10 @@ public static class Names { public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; + /** + * Cluster management tasks. Tasks that manage data, and tasks that report on cluster health via statistics etc. + * Not a latency sensitive thread pool: some tasks may time be long-running; and the thread pool size is limited / relatively small. + */ public static final String MANAGEMENT = "management"; public static final String FLUSH = "flush"; public static final String REFRESH = "refresh"; @@ -199,6 +219,13 @@ public Collection builders() { Setting.Property.NodeScope ); + /** + * Defines and builds the many thread pools delineated in {@link Names}. + * + * @param settings + * @param meterRegistry + * @param customBuilders a list of additional thread pool builders that were defined elsewhere (like a Plugin). + */ @SuppressWarnings({ "rawtypes", "unchecked" }) public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final ExecutorBuilder... customBuilders) { assert Node.NODE_NAME_SETTING.exists(settings); @@ -327,6 +354,7 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex threadContext = new ThreadContext(settings); + // Now that all the thread pools have been defined, actually build them. 
final Map executors = new HashMap<>(); for (final Map.Entry entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); @@ -902,6 +930,11 @@ void check(long newAbsoluteMillis, long newRelativeNanos) { } } + /** + * Holds a thread pool and additional ES information ({@link Info}) about that Java thread pool ({@link ExecutorService}) instance. + * + * See {@link Names} for a list of thread pools, though there can be more dynamically added via plugins. + */ static class ExecutorHolder { private final ExecutorService executor; public final Info info; @@ -917,6 +950,9 @@ ExecutorService executor() { } } + /** + * The settings used to create a Java ExecutorService thread pool. + */ public static class Info implements Writeable, ToXContentFragment { private final String name; diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 327dc3d4f5fd0..0e4818701c5f5 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -227,7 +227,7 @@ private Repository createRepository() { new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually } }; diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index dafe994b502f0..185f4582e7377 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2130,7 +2130,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { recoverySettings ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo in the test thread } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index 0cf6cb93c865b..207df0faddd07 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -449,9 +449,9 @@ public Map getRepositories( (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { if (enabled.get()) { - super.assertSnapshotOrGenericThread(); + super.assertSnapshotOrStatelessPermittedThreadPool(); } } diff --git 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java @@ -248,7 +248,7 @@ public Map getRepositories( "test-fs", (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // ignore } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index 9c36d7b762871..c54ead2bdbc45 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -609,7 +609,7 @@ private void testDirectories( ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; From 9ae414ebda41f7a6aeb2fbbdbeb5965ec2616df0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 09:39:41 -0400 Subject: [PATCH 195/264] ESQL: Merge randomize a test attribute (#107239) In ESQL we test our functions with values read directly from a `Page` and values that are modified by some other operation - called `floating`. This caused us some trouble when we were working on reference counting but we've mostly got this handled now. Our tests were running lots of cases in "floating" and "non-floating" versions. Replaces three such cases with randomization - so we have a 50/50 shot of floating on each test. We want to do this because ESQL's builds generate a *ton* of tests. Enough to upset Gradle Enterprise. This change cuts the number of test cases we run from 227811 to 159271. About a 30% reduction in test cases. The actual runtime of the tests doesn't change a ton. These tests are all fairly fast.
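The shape of the change, as a sketch adapted from the diff below (method and helper names are the ones that appear there; this is not a standalone runnable test):

    // Before: paired methods ran every generated case twice.
    //   public final void testEvaluate()         { testEvaluate(false); }
    //   public final void testEvaluateFloating() { testEvaluate(true); }

    // After: one method flips a coin per run, halving the case count.
    public final void testEvaluate() {
        boolean readFloating = randomBoolean(); // 50/50 floating vs. direct read
        Expression expression = readFloating
            ? buildDeepCopyOfFieldExpression(testCase)  // "floating" values
            : buildFieldExpression(testCase);           // values read from the Page
        // ... evaluation and assertions continue unchanged ...
    }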
--- .../scalar/string/LocateNoStartEvaluator.java | 139 ++++++++++++++++++ .../function/AbstractFunctionTestCase.java | 81 ++-------- 2 files changed, 150 insertions(+), 70 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java new file mode 100644 index 0000000000000..947b1ecb49d0c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. 
+ */ +public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final DriverContext driverContext; + + public LocateNoStartEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + return eval(page.getPositionCount(), strVector, substrVector).asBlock(); + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr) { + this.source = source; + this.str = str; + this.substr = substr; + } + + @Override + public LocateNoStartEvaluator get(DriverContext 
context) { + return new LocateNoStartEvaluator(source, str.get(context), substr.get(context), context); + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 0772b03bf3210..12c141cc7c8a7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -242,18 +242,11 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe } public final void testEvaluate() { - testEvaluate(false); - } - - public final void testEvaluateFloating() { - testEvaluate(true); - } - - private void testEvaluate(boolean readFloating) { assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); + boolean readFloating = randomBoolean(); Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); if (testCase.getExpectedTypeError() != null) { assertTrue("expected unresolved", expression.typeResolved().unresolved()); @@ -296,47 +289,27 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { } /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read directly from the page. + * Evaluates a {@link Block} of values, all copied from the input pattern.. *
<p>
    * Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. *
</p>
    */ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read from an intermediate operator. - *
<p>
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
    - */ - public final void testEvaluateBlockWithoutNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); } /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page. + * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator. - */ - public final void testEvaluateBlockWithNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } /** * Evaluates a {@link Block} of values, all copied from the input pattern, - * read directly from the {@link Page}, using the - * {@link CrankyCircuitBreakerService} which fails randomly. + * using the {@link CrankyCircuitBreakerService} which fails randomly. *
<p>
    * Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. @@ -345,25 +318,7 @@ public final void testEvaluateBlockWithNullsFloating() { public final void testCrankyEvaluateBlockWithoutNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, - * read from an intermediate operator, using the - * {@link CrankyCircuitBreakerService} which fails randomly. - *
<p>
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
    - */ - public final void testCrankyEvaluateBlockWithoutNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -371,27 +326,12 @@ public final void testCrankyEvaluateBlockWithoutNullsFloating() { /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page, - * using the {@link CrankyCircuitBreakerService} which fails randomly. + * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. */ public final void testCrankyEvaluateBlockWithNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator, - * using the {@link CrankyCircuitBreakerService} which fails randomly. - */ - public final void testCrankyEvaluateBlockWithNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -404,9 +344,10 @@ protected Matcher allNullsMatcher() { return nullValue(); } - private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls, boolean readFloating) { + private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); From c50fcb95edc7ee9e9a687b56681098ec03518c4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 9 Apr 2024 16:55:33 +0200 Subject: [PATCH 196/264] [DOCS] Expands the list of possible values of the result parameter of the bulk API. (#107265) --- docs/reference/docs/bulk.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index a055c278b41d9..fc25e811807a9 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,9 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. -+ -This parameter is only returned for successful operations. +`updated`. 
Other valid values are `noop` and `not found`. `_shards`:: (object) From 62f19e3a0c6aa313a68f75900aaa1427fc854197 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 9 Apr 2024 17:01:32 +0200 Subject: [PATCH 197/264] Disable dynamic pruning on unindexed fields. (#107194) In order to know whether it can apply dynamic pruning using the points index, Lucene simply looks at whether a field has points. Unfortunately, this doesn't work well with our support for archive indexes, where numeric/date fields report that they have points, but they only support metadata operations on these points (min/max values, doc count), with the goal of quickly filtering out such archive indexes during the `can_match` phase. In order to address this discrepancy, dynamic pruning is now disabled when mappings report that a field is not indexed. This works because archive indexes automatically set `index: false` to make sure that filters run on doc values and not points. However, this is not a great fix as this increases our reliance on disabling dynamic pruning, which is currently marked as deprecated and scheduled for removal in the next Lucene major. So we'll need to either add it back to Lucene or find another approach. Closes #107168 --- .../mapper/extras/ScaledFloatFieldMapper.java | 8 +- .../mapper/murmur3/Murmur3FieldMapper.java | 2 +- .../fielddata/BooleanScriptFieldData.java | 5 ++ .../index/fielddata/DateScriptFieldData.java | 5 ++ .../fielddata/DoubleScriptFieldData.java | 5 ++ .../fielddata/IndexNumericFieldData.java | 13 +++- .../index/fielddata/LongScriptFieldData.java | 5 ++ .../plain/SortedDoublesIndexFieldData.java | 17 ++++- .../plain/SortedNumericIndexFieldData.java | 26 +++++-- .../index/mapper/BooleanFieldMapper.java | 4 +- .../index/mapper/DateFieldMapper.java | 3 +- .../index/mapper/NumberFieldMapper.java | 74 +++++++++++++++---- .../index/mapper/SeqNoFieldMapper.java | 2 +- .../index/mapper/VersionFieldMapper.java | 2 +- .../index/mapper/DateFieldTypeTests.java | 3 +- .../functionscore/FunctionScoreTests.java | 5 ++ .../sampler/DiversifiedSamplerTests.java | 3 +- .../fetch/subphase/FetchFieldsPhaseTests.java | 2 +- .../unsignedlong/UnsignedLongFieldMapper.java | 5 +- .../UnsignedLongIndexFieldData.java | 10 ++- 20 files changed, 160 insertions(+), 39 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index e2b932b01a516..09507ae926f44 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -340,7 +340,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); return new ScaledFloatIndexFieldData(scaledValues, scalingFactor, ScaledFloatDocValuesField::new); }; @@ -608,6 +609,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; // We don't know how to take advantage of the index with half floats anyway + } + @Override public NumericType getNumericType() { /* diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java 
b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 08a133bcb69c8..44f52105f64c9 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -80,7 +80,7 @@ public String typeName() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new, isIndexed()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java index f3dcda813a39d..bc83f85edcf7d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class BooleanScriptLeafFieldData extends LeafLongFieldData { private final BooleanScriptDocValues booleanScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java index 1199583f89766..a9fdf72e23a31 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DateScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java index 9307233f99161..e08a62eee8fb0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java @@ -89,6 +89,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DoubleScriptLeafFieldData extends LeafDoubleFieldData { private final DoubleScriptDocValues doubleScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java index dce94649e0088..391e9e285807f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java @@ -121,6 +121,7 @@ public final SortField sortField( case LONG: case DOUBLE: // longs, doubles and dates use the same type for doc-values and points. 
+ sortField.setOptimizeSortWithPoints(isIndexed()); break; default: @@ -132,12 +133,18 @@ public final SortField sortField( } /** - * Does {@link #sortField} require a custom comparator because of the way - * the data is stored in doc values ({@code true}) or are the docs values - * stored such that they can be sorted without decoding ({@code false}). + * Should sorting use a custom comparator source vs. rely on a Lucene {@link SortField}. Using a Lucene {@link SortField} when possible + * is important because index sorting cannot be configured with a custom comparator, and because it gives better performance by + * dynamically pruning irrelevant hits. On the other hand, Lucene {@link SortField}s are less flexible and make stronger assumptions + * about how the data is indexed. Therefore, they cannot be used in all cases. */ protected abstract boolean sortRequiresCustomComparator(); + /** + * Return true if, and only if the field is indexed with points that match the content of doc values. + */ + protected abstract boolean isIndexed(); + @Override public final SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { return sortField(getNumericType(), missingValue, sortMode, nested, reverse); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java index 6be5eb9514918..85850b530a1de 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java @@ -92,6 +92,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class LongScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java index a1686344b9309..b7654dfa5569f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java @@ -42,22 +42,25 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -65,18 +68,21 @@ public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final 
ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedDoublesIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint(); this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -94,6 +100,11 @@ protected boolean sortRequiresCustomComparator() { return numericType == NumericType.HALF_FLOAT; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return numericType; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java index c2507dd2470a5..9c871ac822625 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -42,26 +42,34 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; - public Builder(String name, NumericType numericType, ToScriptFieldFactory toScriptFieldFactory) { - this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory); + public Builder( + String name, + NumericType numericType, + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed + ) { + this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory, indexed); } public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -69,18 +77,21 @@ public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedNumericIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint() == false; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -98,6 +109,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override 
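+    // Plain doc-values fields may be backed by real points, so the answer comes from the mapper; script-backed and archive-backed fields report false.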
+ public boolean isIndexed() { + return indexed; + } + @Override protected XFieldComparatorSource dateComparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { if (numericType == NumericType.DATE_NANOSECONDS) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 968c48abc54d8..f07cd1cc32076 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -276,7 +276,9 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext } if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new); + // boolean fields are indexed, but not with points + boolean indexed = false; + return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new, indexed); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1b926734c1713..3092ed1e827df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -795,7 +795,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext return new SortedNumericIndexFieldData.Builder( name(), resolution.numericType(), - resolution.getDefaultToScriptFieldFactory() + resolution.getDefaultToScriptFieldFactory(), + isIndexed() ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1f7a3bf2106ae..ebb6672cbab18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -403,8 +403,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, HalfFloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + HalfFloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -577,8 +583,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, FloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + FloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -717,8 +729,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder 
getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, DoubleDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + DoubleDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -831,8 +849,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ByteDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ByteDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -930,8 +954,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ShortDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ShortDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1097,8 +1127,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, IntegerDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + IntegerDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1234,8 +1270,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, LongDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + LongDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1494,7 +1536,7 @@ public static Query longRangeQuery( return builder.apply(l, u); } - public abstract IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType); + public abstract IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType); public IndexFieldData.Builder getValueFetcherFieldDataBuilder( String name, @@ -1693,7 +1735,7 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext : type.numericType.getValuesSourceType(); if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return type.getFieldDataBuilder(name(), valuesSourceType); + return type.getFieldDataBuilder(this, valuesSourceType); } if 
(operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 2635c1c11be8e..a46a310d0770f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -216,7 +216,7 @@ public Query rangeQuery( @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 0d4f5562d3046..1d4f56b02ed74 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -66,7 +66,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new, isIndexed()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index d83c75455292f..726ec8561535e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -317,7 +317,8 @@ public void testDateNanoDocValues() throws IOException { "my_date", IndexNumericFieldData.NumericType.DATE_NANOSECONDS, CoreValuesSourceType.DATE, - DateNanosDocValuesField::new + DateNanosDocValuesField::new, + false ); // Read index and check the doc values DirectoryReader reader = DirectoryReader.open(w); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 20d5fdae5e4cf..f11d3f9b70d23 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -242,6 +242,11 @@ public LeafNumericFieldData loadDirect(LeafReaderContext context) throws Excepti protected boolean sortRequiresCustomComparator() { return false; } + + @Override + protected boolean isIndexed() { + return false; + } } private static final ScoreFunction RANDOM_SCORE_FUNCTION = new RandomScoreFunction(0, 0, new IndexFieldDataStub()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 6ac538f6c7ce9..96ad3cd5afb22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ 
-163,7 +163,8 @@ private void testCase( "price", IndexNumericFieldData.NumericType.DOUBLE, CoreValuesSourceType.NUMERIC, - (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n) + (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n), + false ); FunctionScoreQuery query = new FunctionScoreQuery( new MatchAllDocsQuery(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index a5371e7b0b00a..39e73837c83ea 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -65,7 +65,7 @@ public void testDocValueFetcher() throws IOException { when(fieldType.valueFetcher(any(), any())).thenReturn( new DocValueFetcher( DocValueFormat.RAW, - new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null) + new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null, false) ) ); when(sec.getFieldType(any())).thenReturn(fieldType); diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e0ce1f92b2a37..d30c249813cd2 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -362,9 +362,10 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); - return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new); + return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new, isIndexed()); }; } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java index 0a312933768fb..2f936531f8c72 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java @@ -17,13 +17,16 @@ public class UnsignedLongIndexFieldData extends IndexNumericFieldData { private final IndexNumericFieldData signedLongIFD; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; UnsignedLongIndexFieldData( IndexNumericFieldData signedLongFieldData, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.signedLongIFD = signedLongFieldData; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -51,6 +54,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override + protected boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return NumericType.LONG; From 
3674a6f1f64a87f12283daffabe8b56a30824db2 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 9 Apr 2024 17:10:58 +0200 Subject: [PATCH 198/264] ESQL: Commit LocateNoStartEvaluator (#107270) This is automatically generated and was created as part of #106899. From 4a5329d29b2720fe7c5b293607cf63bf592c6d9b Mon Sep 17 00:00:00 2001 From: Sean Story Date: Tue, 9 Apr 2024 10:21:06 -0500 Subject: [PATCH 199/264] typo: "not found" -> "not_found" (#107276) --- docs/reference/docs/bulk.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index fc25e811807a9..1a32e64cedb1f 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,7 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. Other valid values are `noop` and `not found`. +`updated`. Other valid values are `noop` and `not_found`. `_shards`:: (object) From 31c05e9528772731aa64c8558c25ab8ef1165d51 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 9 Apr 2024 18:57:34 +0300 Subject: [PATCH 200/264] ESQL: allow sorting by expressions and not only regular fields (#107158) * Support expressions in sort commands --- docs/changelog/107158.yaml | 5 + .../src/main/resources/eval.csv-spec | 66 +++++ .../src/main/resources/stats.csv-spec | 24 ++ .../esql/optimizer/LogicalPlanOptimizer.java | 33 ++- .../optimizer/LogicalPlanOptimizerTests.java | 250 ++++++++++++++++-- 5 files changed, 357 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/107158.yaml diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml new file mode 100644 index 0000000000000..9589fe7e7264b --- /dev/null +++ b/docs/changelog/107158.yaml @@ -0,0 +1,5 @@ +pr: 107158 +summary: "ESQL: allow sorting by expressions and not only regular fields" +area: ES|QL +type: feature +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 9b06e9a0a8b23..85b665d717449 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -424,3 +424,69 @@ emp_no:i -10002 -10003 ; + +sortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| SORT emp_no + salary ASC +| EVAL emp_no = -emp_no +| LIMIT 10 +| EVAL sum = -emp_no + salary +| KEEP emp_no, salary, sum +; + + emp_no:i | salary:i | sum:i +-10015 |25324 |35339 +-10035 |25945 |35980 +-10092 |25976 |36068 +-10048 |26436 |36484 +-10057 |27215 |37272 +-10084 |28035 |38119 +-10026 |28336 |38362 +-10068 |28941 |39009 +-10060 |29175 |39235 +-10042 |30404 |40446 +; + +sortConcat1#[skip:-8.13.99,reason:supported in 8.14] +from employees +| sort concat(left(last_name, 1), left(first_name, 1)), salary desc +| keep first_name, last_name, salary +| eval ll = left(last_name, 1), lf = left(first_name, 1) +| limit 10 +; + + first_name:keyword | last_name:keyword | salary:integer|ll:keyword|lf:keyword +Mona |Azuma |46595 |A |M +Satosi |Awdeh |50249 |A |S +Brendon |Bernini |33370 |B |B +Breannda |Billingsley |29175 |B |B +Cristinel |Bouloucos |58715 |B |C +Charlene |Brattka |28941 |B |C +Margareta |Bierman |41933 |B |M +Mokhtar |Bernatsky |38992 |B |M +Parto |Bamford |61805 |B |P +Premal |Baek |52833 |B |P +; + +sortConcat2#[skip:-8.13.99,reason:supported in 8.14] +from 
employees +| eval ln = last_name, fn = first_name, concat = concat(left(last_name, 1), left(first_name, 1)) +| sort concat(left(ln, 1), left(fn, 1)), salary desc +| keep f*, l*, salary +| eval c = concat(left(last_name, 1), left(first_name, 1)) +| drop *name, lan* +| limit 10 +; + + fn:keyword | ln:keyword | salary:integer| c:keyword +Mona |Azuma |46595 |AM +Satosi |Awdeh |50249 |AS +Brendon |Bernini |33370 |BB +Breannda |Billingsley |29175 |BB +Cristinel |Bouloucos |58715 |BC +Charlene |Brattka |28941 |BC +Margareta |Bierman |41933 |BM +Mokhtar |Bernatsky |38992 |BM +Parto |Bamford |61805 |BP +Premal |Baek |52833 |BP +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index fb2d46baf27ff..867ff127c90e8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1585,3 +1585,27 @@ c:l | k1:i | languages:i 21 | 5 | 5 10 | null | null ; + +minWithSortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + languages; + + min:i | languages:i +25324 |5 +25976 |1 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; + +minWithSortExpression2#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + CASE(languages == 5, 655, languages); + + min:i | languages:i +25976 |1 +25324 |5 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 7fb2784bb044f..2aaf34a1dd1d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -84,6 +84,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; +import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.SubstituteSurrogates.rawTemporaryName; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN; @@ -125,7 +126,8 @@ protected static Batch substitutions() { new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject(), new SkipQueryOnEmptyMappings(), - new SubstituteSpatialSurrogates() + new SubstituteSpatialSurrogates(), + new ReplaceOrderByExpressionWithEval() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); } @@ -321,6 +323,35 @@ protected SpatialRelatesFunction rule(SpatialRelatesFunction function) { } } + static class ReplaceOrderByExpressionWithEval extends OptimizerRules.OptimizerRule { + private static int counter = 0; + + @Override + protected LogicalPlan rule(OrderBy orderBy) { + int size = orderBy.order().size(); + List evals = new ArrayList<>(size); + List newOrders = new ArrayList<>(size); + + for (int i = 0; i < size; i++) { + var order = orderBy.order().get(i); + if (order.child() instanceof Attribute == false) { + var name = rawTemporaryName("order_by", String.valueOf(i), 
String.valueOf(counter++)); + var eval = new Alias(order.child().source(), name, order.child()); + newOrders.add(order.replaceChildren(List.of(eval.toAttribute()))); + evals.add(eval); + } else { + newOrders.add(order); + } + } + if (evals.isEmpty()) { + return orderBy; + } else { + var newOrderBy = new OrderBy(orderBy.source(), new Eval(orderBy.source(), orderBy.child(), evals), newOrders); + return new Project(orderBy.source(), newOrderBy, orderBy.output()); + } + } + } + static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule { ConvertStringToByteRef() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index eb3901f37b99a..a60999baba9fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -156,6 +157,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -3832,12 +3834,11 @@ public void testNoWrongIsNotNullPruning() { * * For DISSECT expects the following; the others are similar. * - * EsqlProject[[first_name{f}#37, emp_no{r}#33, salary{r}#34]] - * \_TopN[[Order[$$emp_no$temp_name$36{r}#46 + $$salary$temp_name$41{r}#47 * 13[INTEGER],ASC,LAST], Order[NEG($$salary$t - * emp_name$41{r}#47),DESC,FIRST]],3[INTEGER]] - * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, parser=org.elasticsearch.dissect.Dissect - * Parser@b6858b],[emp_no{r}#33, salary{r}#34]] - * \_Eval[[emp_no{f}#36 AS $$emp_no$temp_name$36, salary{f}#41 AS $$salary$temp_name$41]] + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] 
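* (the $$...$temp_name$... attributes are temporary copies of the sort keys, introduced so that the overwriting command cannot clobber the values the TopN still sorts on)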
*/ public void testPushdownWithOverwrittenName() { @@ -3850,7 +3851,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepAfter = """ FROM test - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | KEEP first_name, emp_no, salary | LIMIT 3 @@ -3859,7 +3860,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepFirst = """ FROM test | KEEP emp_no, salary, first_name - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | LIMIT 3 """; @@ -3876,20 +3877,27 @@ public void testPushdownWithOverwrittenName() { assertThat(projections.get(2).name(), equalTo("salary")); var topN = as(project.child(), TopN.class); - assertThat(topN.order().size(), is(2)); + assertThat(topN.order().size(), is(3)); - var firstOrderExpr = as(topN.order().get(0), Order.class); - var mul = as(firstOrderExpr.child(), Mul.class); - var add = as(mul.left(), Add.class); - var renamed_emp_no = as(add.left(), ReferenceAttribute.class); - var renamed_salary = as(add.right(), ReferenceAttribute.class); + var firstOrder = as(topN.order().get(0), Order.class); + assertThat(firstOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamed_emp_no = as(firstOrder.child(), ReferenceAttribute.class); assertThat(renamed_emp_no.toString(), startsWith("$$emp_no$temp_name")); + + var secondOrder = as(topN.order().get(1), Order.class); + assertThat(secondOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_salary = as(secondOrder.child(), ReferenceAttribute.class); assertThat(renamed_salary.toString(), startsWith("$$salary$temp_name")); - var secondOrderExpr = as(topN.order().get(1), Order.class); - var neg = as(secondOrderExpr.child(), Neg.class); - var renamed_salary2 = as(neg.field(), ReferenceAttribute.class); - assert (renamed_salary2.semanticEquals(renamed_salary) && renamed_salary2.equals(renamed_salary)); + var thirdOrder = as(topN.order().get(2), Order.class); + assertThat(thirdOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(thirdOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_emp_no2 = as(thirdOrder.child(), ReferenceAttribute.class); + assertThat(renamed_emp_no2.toString(), startsWith("$$emp_no$temp_name")); + + assert (renamed_emp_no2.semanticEquals(renamed_emp_no) && renamed_emp_no2.equals(renamed_emp_no)); Eval renamingEval = null; if (overwritingCommand.startsWith("EVAL")) { @@ -3913,8 +3921,210 @@ public void testPushdownWithOverwrittenName() { for (Alias field : renamingEval.fields()) { attributesCreatedInEval.add(field.toAttribute()); } - assert (attributesCreatedInEval.contains(renamed_emp_no)); - assert (attributesCreatedInEval.contains(renamed_salary)); + assertThat(attributesCreatedInEval, allOf(hasItem(renamed_emp_no), hasItem(renamed_salary), hasItem(renamed_emp_no2))); + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 3 for the other overwritingCommands + // emp_no ASC nulls first + Alias empNoAsc = renamingEval.fields().get(0); + assertThat(empNoAsc.toAttribute(), 
equalTo(renamed_emp_no)); + var emp_no = as(empNoAsc.child(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + + // salary DESC nulls last + Alias salaryDesc = renamingEval.fields().get(1); + assertThat(salaryDesc.toAttribute(), equalTo(renamed_salary)); + var salary_desc = as(salaryDesc.child(), FieldAttribute.class); + assertThat(salary_desc.name(), equalTo("salary")); + + assertThat(renamingEval.child(), instanceOf(EsRelation.class)); + } + } + + /** + * Expects + * Project[[min{r}#4, languages{f}#11]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#18,ASC,LAST]],1000[INTEGER]] + * \_Eval[[min{r}#4 + languages{f}#11 AS $$order_by$temp_name$0]] + * \_Aggregate[[languages{f}#11],[MIN(salary{f}#13) AS min, languages{f}#11]] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + */ + public void testReplaceSortByExpressionsWithStats() { + var plan = optimizedPlan(""" + from test + | stats min = min(salary) by languages + | sort min + languages + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("min", "languages")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(1)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + var aggregate = as(eval.child(), Aggregate.class); + var aggregates = aggregate.aggregates(); + assertThat(Expressions.names(aggregates), contains("min", "languages")); + var unwrapped = Alias.unwrap(aggregates.get(0)); + var min = as(unwrapped, Min.class); + as(aggregate.child(), EsRelation.class); + } + + /** + * Expects + * + * Project[[salary{f}#19, languages{f}#17, emp_no{f}#14]] + * \_TopN[[Order[$$order_by$0$0{r}#24,ASC,LAST], Order[emp_no{f}#14,DESC,FIRST]],1000[INTEGER]] + * \_Eval[[salary{f}#19 / 10000[INTEGER] + languages{f}#17 AS $$order_by$0$0]] + * \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..] 
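+     * (both SORT commands use the same expression, so a single synthetic $$order_by$0$0 eval feeds the merged TopN)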
+ */ + public void testReplaceSortByExpressionsMultipleSorts() { + var plan = optimizedPlan(""" + from test + | sort salary/10000 + languages, emp_no desc + | eval d = emp_no + | sort salary/10000 + languages, d desc + | keep salary, languages, emp_no + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("salary", "languages", "emp_no")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + ReferenceAttribute expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + order = as(topN.order().get(1), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + FieldAttribute empNo = as(order.child(), FieldAttribute.class); + assertThat(empNo.name(), equalTo("emp_no")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields.size(), equalTo(1)); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + Alias salaryAddLanguages = eval.fields().get(0); + var add = as(salaryAddLanguages.child(), Add.class); + var div = as(add.left(), Div.class); + var salary = as(div.left(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _10000 = as(div.right(), Literal.class); + assertThat(_10000.value(), equalTo(10000)); + var languages = as(add.right(), FieldAttribute.class); + assertThat(languages.name(), equalTo("languages")); + + as(eval.child(), EsRelation.class); + } + + /** + * For DISSECT expects the following; the others are similar. + * + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] + * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] + */ + public void testReplaceSortByExpressions() { + List overwritingCommands = List.of( + "EVAL emp_no = 3*emp_no, salary = -2*emp_no-salary", + "DISSECT first_name \"%{emp_no} %{salary}\"", + "GROK first_name \"%{WORD:emp_no} %{WORD:salary}\"", + "ENRICH languages_idx ON first_name WITH emp_no = language_code, salary = language_code" + ); + + String queryTemplateKeepAfter = """ + FROM test + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | KEEP first_name, emp_no, salary + | LIMIT 3 + """; + // Equivalent but with KEEP first - ensures that attributes in the final projection are correct after pushdown rules were applied. + String queryTemplateKeepFirst = """ + FROM test + | KEEP emp_no, salary, first_name + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | LIMIT 3 + """; + + for (String overwritingCommand : overwritingCommands) { + String queryTemplate = randomBoolean() ? 
queryTemplateKeepFirst : queryTemplateKeepAfter; + var plan = optimizedPlan(LoggerMessageFormat.format(null, queryTemplate, overwritingCommand)); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(projections.size(), equalTo(3)); + assertThat(projections.get(0).name(), equalTo("first_name")); + assertThat(projections.get(1).name(), equalTo("emp_no")); + assertThat(projections.get(2).name(), equalTo("salary")); + + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var firstOrderExpr = as(topN.order().get(0), Order.class); + assertThat(firstOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamedEmpNoSalaryExpression = as(firstOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedEmpNoSalaryExpression.toString(), startsWith("$$order_by$0$")); + + var secondOrderExpr = as(topN.order().get(1), Order.class); + assertThat(secondOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamedNegatedSalaryExpression = as(secondOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedNegatedSalaryExpression.toString(), startsWith("$$order_by$1$")); + + Eval renamingEval = null; + if (overwritingCommand.startsWith("EVAL")) { + // Multiple EVALs should be merged, so there's only one. + renamingEval = as(topN.child(), Eval.class); + } + if (overwritingCommand.startsWith("DISSECT")) { + var dissect = as(topN.child(), Dissect.class); + renamingEval = as(dissect.child(), Eval.class); + } + if (overwritingCommand.startsWith("GROK")) { + var grok = as(topN.child(), Grok.class); + renamingEval = as(grok.child(), Eval.class); + } + if (overwritingCommand.startsWith("ENRICH")) { + var enrich = as(topN.child(), Enrich.class); + renamingEval = as(enrich.child(), Eval.class); + } + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands + + // 13*(emp_no+salary) + Alias _13empNoSalary = renamingEval.fields().get(0); + assertThat(_13empNoSalary.toAttribute(), equalTo(renamedEmpNoSalaryExpression)); + var mul = as(_13empNoSalary.child(), Mul.class); + var add = as(mul.left(), Add.class); + var emp_no = as(add.left(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + var salary = as(add.right(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _13 = as(mul.right(), Literal.class); + assertThat(_13.value(), equalTo(13)); + + // -salary + Alias negatedSalary = renamingEval.fields().get(1); + assertThat(negatedSalary.toAttribute(), equalTo(renamedNegatedSalaryExpression)); + var neg = as(negatedSalary.child(), Neg.class); + assertThat(neg.field(), equalTo(salary)); assertThat(renamingEval.child(), instanceOf(EsRelation.class)); } From aba75664090a808e90148d19fc2def97560ef2d4 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 12:22:15 -0400 Subject: [PATCH 201/264] ESQL: Better tests to AUTO_BUCKET (#107228) This improves the tests for AUTO_BUCKET marginally, specifically so that it tests all valid combinations of arguments and generates a correct types table. 
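For readers who haven't used it: AUTO_BUCKET(field, buckets, from, to) picks a human-friendly bucket size that yields at most roughly `buckets` buckets over the from/to range and returns, for each row, the bucket the row falls into. A hypothetical query of the shape these tests enumerate (index and values invented for illustration, not taken from the test fixtures):

FROM employees
| EVAL b = AUTO_BUCKET(salary, 20, 25000, 100000)
| STATS c = COUNT(*) BY b
| SORT b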
This'll combine nicely with #106782 to generate the signatures that kibana needs for its editor. --- .../esql/functions/types/auto_bucket.asciidoc | 37 +++- .../src/main/resources/meta.csv-spec | 4 +- .../function/scalar/math/AutoBucket.java | 4 +- .../function/AbstractFunctionTestCase.java | 48 +++-- .../expression/function/TestCaseSupplier.java | 8 + .../function/scalar/math/AutoBucketTests.java | 179 ++++++++++-------- 6 files changed, 180 insertions(+), 100 deletions(-) diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index 535e2df29c353..cfe74ae25c3d0 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -5,5 +5,40 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== field | buckets | from | to | result - +datetime | integer | datetime | datetime | datetime +datetime | integer | datetime | keyword | datetime +datetime | integer | datetime | text | datetime +datetime | integer | keyword | datetime | datetime +datetime | integer | keyword | keyword | datetime +datetime | integer | keyword | text | datetime +datetime | integer | text | datetime | datetime +datetime | integer | text | keyword | datetime +datetime | integer | text | text | datetime +double | integer | double | double | double +double | integer | double | integer | double +double | integer | double | long | double +double | integer | integer | double | double +double | integer | integer | integer | double +double | integer | integer | long | double +double | integer | long | double | double +double | integer | long | integer | double +double | integer | long | long | double +integer | integer | double | double | double +integer | integer | double | integer | double +integer | integer | double | long | double +integer | integer | integer | double | double +integer | integer | integer | integer | double +integer | integer | integer | long | double +integer | integer | long | double | double +integer | integer | long | integer | double +integer | integer | long | long | double +long | integer | double | double | double +long | integer | double | integer | double +long | integer | double | long | double +long | integer | integer | double | double +long | integer | integer | integer | double +long | integer | integer | long | double +long | integer | long | double | double +long | integer | long | integer | double +long | integer | long | long | double |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index d344b50c0364f..492da4ee5ef36 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,7 +7,7 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "double avg(number:double|integer|long)"
"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" @@ -117,7 +117,7 @@ acos |number |"double|integer|long|unsigne asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] +auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] avg |number |"double|integer|long" |[""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index b9aeff7f1d935..ea581437f6c4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -90,8 +90,8 @@ public AutoBucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date", "string" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date", "string" }) Expression to + @Param(name = "from", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression to ) { super(source, List.of(field, buckets, from, to)); this.field = field; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 12c141cc7c8a7..889dfbf4c9b17 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -214,7 +214,10 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) { } Layout.Builder builder = new Layout.Builder(); buildLayout(builder, e); - assertTrue(e.resolved()); + Expression.TypeResolution resolution = e.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } 
return EvalMapper.toEvaluator(e, builder.build()); } @@ -256,7 +259,10 @@ public final void testEvaluate() { } return; } - assertFalse("expected resolved", expression.typeResolved().unresolved()); + Expression.TypeResolution resolution = expression.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } expression = new FoldNull().rule(expression); assertThat(expression.dataType(), equalTo(testCase.expectedType())); logger.info("Result type: " + expression.dataType()); @@ -596,6 +602,28 @@ private static String signatureType(DataType type) { * on input types like {@link Greatest} or {@link Coalesce}. */ protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { + return anyNullIsNull( + testCaseSuppliers, + (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false + && nullValueDataType == DataTypes.NULL + && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), + (nullPosition, original) -> original + ); + } + + public interface ExpectedType { + DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); + } + + public interface ExpectedEvaluatorToString { + Matcher evaluatorToString(int nullPosition, Matcher original); + } + + protected static List anyNullIsNull( + List testCaseSuppliers, + ExpectedType expectedType, + ExpectedEvaluatorToString evaluatorToString + ) { typesRequired(testCaseSuppliers); List suppliers = new ArrayList<>(testCaseSuppliers.size()); suppliers.addAll(testCaseSuppliers); @@ -618,15 +646,12 @@ protected static List anyNullIsNull(boolean entirelyNullPreser TestCaseSupplier.TestCase oc = original.get(); List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { TestCaseSupplier.TypedData od = oc.getData().get(i); - if (i == finalNullPosition) { - return new TestCaseSupplier.TypedData(null, od.type(), od.name()); - } - return od; + return i == finalNullPosition ? od.forceValueToNull() : od; }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString(), - oc.expectedType(), + evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -649,7 +674,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser return new TestCaseSupplier.TestCase( data, equalTo("LiteralsEvaluator[lit=null]"), - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType(), + expectedType.expectedType(finalNullPosition, DataTypes.NULL, oc), nullValue(), null, oc.getExpectedTypeError(), @@ -755,9 +780,8 @@ private static Stream> allPermutations(int argumentCount) { if (argumentCount == 0) { return Stream.of(List.of()); } - if (argumentCount > 4) { - // TODO check for a limit 4. is arbitrary. 
- throw new IllegalArgumentException("would generate too many types"); + if (argumentCount > 3) { + throw new IllegalArgumentException("would generate too many combinations"); } Stream> stream = representable().map(t -> List.of(t)); for (int i = 1; i < argumentCount; i++) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index d600e51c07925..c064cfebd9cc5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1325,6 +1325,14 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Return a {@link TypedData} that always returns {@code null} for it's + * value without modifying anything else in the supplier. + */ + public TypedData forceValueToNull() { + return new TypedData(null, type, name, forceLiteral); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java index 013753c801c39..9d8cf702a375a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java @@ -13,126 +13,139 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; +import java.util.function.LongSupplier; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class AutoBucketTests extends AbstractScalarFunctionTestCase { +public class AutoBucketTests extends AbstractFunctionTestCase { public AutoBucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Autobucket Single date", () -> { - List args = List.of( - new TestCaseSupplier.TypedData( - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), - DataTypes.DATETIME, - "arg" - ) - ); - return new TestCaseSupplier.TestCase( - args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", - DataTypes.DATETIME, - dateResultsMatcher(args) - ); - }), new TestCaseSupplier("Autobucket Single long", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100L, 
DataTypes.LONG, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastLongToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single int", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100, DataTypes.INTEGER, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastIntToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single double", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100.0, DataTypes.DOUBLE, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=Attribute[channel=0], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }))); + List suppliers = new ArrayList<>(); + dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); + numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100); + numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0); + // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL + ? DataTypes.NULL + : original.expectedType(), + (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + ) + ); } - private Expression build(Source source, Expression arg) { - Literal from; - Literal to; - if (arg.dataType() == DataTypes.DATETIME) { - from = stringOrDateTime("2023-02-01T00:00:00.00Z"); - to = stringOrDateTime("2023-03-01T09:00:00.00Z"); - } else { - from = new Literal(Source.EMPTY, 0, DataTypes.DOUBLE); - to = new Literal(Source.EMPTY, 1000, DataTypes.DOUBLE); - } - return new AutoBucket(source, arg, new Literal(Source.EMPTY, 50, DataTypes.INTEGER), from, to); - } + // TODO once we cast above the functions we can drop these + private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataTypes.DATETIME, DataTypes.KEYWORD, DataTypes.TEXT }; - private Literal stringOrDateTime(String date) { - if (randomBoolean()) { - return new Literal(Source.EMPTY, new BytesRef(date), randomBoolean() ? 
DataTypes.KEYWORD : DataTypes.TEXT); + private static void dateCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for (DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + DataTypes.DATETIME, + dateResultsMatcher(args) + ); + })); + } } - return new Literal(Source.EMPTY, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date), DataTypes.DATETIME); } - @Override - protected DataType expectedType(List argTypes) { - if (argTypes.get(0).isNumeric()) { - return DataTypes.DOUBLE; + private static TestCaseSupplier.TypedData dateBound(String name, DataType type, String date) { + Object value; + if (type == DataTypes.DATETIME) { + value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); + } else { + value = new BytesRef(date); } - return argTypes.get(0); + return new TestCaseSupplier.TypedData(value, type, name).forceLiteral(); } - private static Matcher dateResultsMatcher(List typedData) { - long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; + + private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { + for (DataType fromType : NUMBER_BOUNDS_TYPES) { + for (DataType toType : NUMBER_BOUNDS_TYPES) { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(number.get(), "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(numericBound("from", fromType, 0.0)); + args.add(numericBound("to", toType, 1000.0)); + // TODO more number types for "from" and "to" + String attr = "Attribute[channel=0]"; + if (numberType == DataTypes.INTEGER) { + attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; + } else if (numberType == DataTypes.LONG) { + attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; + } + return new TestCaseSupplier.TestCase( + args, + "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=" + + attr + + ", " + + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", + DataTypes.DOUBLE, + dateResultsMatcher(args) + ); + })); + } + } } - private static Matcher numericResultsMatcher(List typedData, Object value) { - return equalTo(value); + private static TestCaseSupplier.TypedData numericBound(String name, DataType type, double value) { + Number v; + if (type == DataTypes.INTEGER) { + v = (int) value; + } else if (type == DataTypes.LONG) { + v = (long) value; + } else { + v = value; + } + return new TestCaseSupplier.TypedData(v, 
type, name).forceLiteral(); } - @Override - protected List argSpec() { - DataType[] numerics = numerics(); - DataType[] all = new DataType[numerics.length + 1]; - all[0] = DataTypes.DATETIME; - System.arraycopy(numerics, 0, all, 1, numerics.length); - return List.of(required(all)); + private static Matcher dateResultsMatcher(List typedData) { + if (typedData.get(0).type() == DataTypes.DATETIME) { + long millis = ((Number) typedData.get(0).data()).longValue(); + return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + } + return equalTo(((Number) typedData.get(0).data()).doubleValue()); } @Override protected Expression build(Source source, List args) { - return build(source, args.get(0)); + return new AutoBucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); } @Override - protected Matcher badTypeError(List spec, int badArgPosition, DataType badArgType) { - return equalTo("first argument of [exp] must be [datetime or numeric], found value [arg0] type [" + badArgType.typeName() + "]"); + public void testSimpleWithNulls() { + assumeFalse("we test nulls in parameters", true); } } From 24aed5c7feaf59474ebb52ffad9db5f51c01fa60 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 9 Apr 2024 09:24:11 -0700 Subject: [PATCH 202/264] Simplify merging enrich output (#107018) The merge logic in MergePositionsOperator is excessively complex and lacks flexibility. It relies on the source operator emitting pages with ascending positions. Additionally, this merge logic introduced an unusual method, appendAllValuesToCurrentPosition, to the Block.Builder. We should replace this with a simpler and more flexible approach. This PR uses a mechanism similar to the grouping aggregation. In fact, it is very close to the values aggregation. Initially, I considered using the GroupingState from ValuesAggregator. However, unlike in the values aggregation, we don't expect many multi-values in enrich. Hence, I introduced the new EnrichResultBuilders instead. 
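To make the idea concrete before the diff: each EnrichResultBuilder keeps one growable "cell" of values per output position and only materializes a Block at the end, so input pages no longer need to arrive in ascending position order. Below is a minimal, self-contained sketch of that pattern; it uses plain Java arrays in place of Block, ObjectArray, and the circuit-breaker accounting, and the names LongCellAccumulator/add/build are illustrative only, not the actual ESQL API.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class LongCellAccumulator {
    private final long[][] cells; // one growable cell per output position

    LongCellAccumulator(int totalPositions) {
        this.cells = new long[totalPositions][];
    }

    // Append values for an output position; pages may arrive in any order.
    void add(int position, long... values) {
        long[] old = cells[position];
        if (old == null) {
            cells[position] = values.clone();
            return;
        }
        long[] merged = Arrays.copyOf(old, old.length + values.length);
        System.arraycopy(values, 0, merged, old.length, values.length);
        cells[position] = merged;
    }

    // Emit one row per position: null, a single value, or a multi-value list.
    List<Object> build() {
        List<Object> rows = new ArrayList<>(cells.length);
        for (long[] cell : cells) {
            if (cell == null) {
                rows.add(null);
            } else if (cell.length == 1) {
                rows.add(cell[0]);
            } else {
                rows.add(Arrays.stream(cell).boxed().toList());
            }
        }
        return rows;
    }
}

For example, new LongCellAccumulator(3) followed by add(1, 10), add(1, 20), and build() yields [null, [10, 20], null]: both values land in the same output row regardless of the order in which their pages arrived, which is exactly what lets the merge step drop the ascending-positions requirement.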
--- x-pack/plugin/esql/build.gradle | 57 +++++++ .../compute/data/BooleanBlock.java | 13 -- .../compute/data/BooleanBlockBuilder.java | 49 ------ .../compute/data/BytesRefBlock.java | 13 -- .../compute/data/BytesRefBlockBuilder.java | 50 ------ .../compute/data/DoubleBlock.java | 13 -- .../compute/data/DoubleBlockBuilder.java | 49 ------ .../elasticsearch/compute/data/IntBlock.java | 13 -- .../compute/data/IntBlockBuilder.java | 49 ------ .../elasticsearch/compute/data/LongBlock.java | 13 -- .../compute/data/LongBlockBuilder.java | 49 ------ .../org/elasticsearch/compute/data/Block.java | 6 - .../compute/data/ConstantNullBlock.java | 5 - .../elasticsearch/compute/data/DocBlock.java | 5 - .../data/SingletonOrdinalsBuilder.java | 5 - .../compute/data/X-Block.java.st | 13 -- .../compute/data/X-BlockBuilder.java.st | 60 ------- .../data/BlockBuilderAppendBlockTests.java | 147 ----------------- .../compute/data/TestBlockBuilder.java | 30 ---- .../enrich/EnrichResultBuilderForBoolean.java | 90 +++++++++++ .../EnrichResultBuilderForBytesRef.java | 107 +++++++++++++ .../enrich/EnrichResultBuilderForDouble.java | 90 +++++++++++ .../enrich/EnrichResultBuilderForInt.java | 90 +++++++++++ .../enrich/EnrichResultBuilderForLong.java | 90 +++++++++++ .../esql/enrich/EnrichLookupService.java | 28 +--- .../esql/enrich/EnrichResultBuilder.java | 80 ++++++++++ .../esql/enrich/MergePositionsOperator.java | 123 +++------------ .../esql/enrich/X-EnrichResultBuilder.java.st | 134 ++++++++++++++++ .../esql/enrich/EnrichResultBuilderTests.java | 148 ++++++++++++++++++ .../enrich/MergePositionsOperatorTests.java | 1 - 30 files changed, 910 insertions(+), 710 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 668ecec0e393d..87ef4dd0b3eff 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.string-templates' esplugin { name 'x-pack-esql' description 'The plugin that powers ESQL for Elasticsearch' @@ -222,3 +223,59 @@ tasks.register("regen") { } } } + +tasks.named("spotlessJava") { dependsOn stringTemplates } +tasks.named('checkstyleMain').configure { + excludes = [ "**/*.java.st" ] +} + +def prop(Type, type, TYPE, BYTES, Array) { + 
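+  // Builds the property map handed to the string-templates plugin: "Type"/"type"/"TYPE" drive the $Type$-style substitutions, and the per-type flags below enable the matching $if(...)$ sections in X-EnrichResultBuilder.java.st.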
return [ + "Type" : Type, + "type" : type, + "TYPE" : TYPE, + "BYTES" : BYTES, + "Array" : Array, + + "int" : type == "int" ? "true" : "", + "long" : type == "long" ? "true" : "", + "double" : type == "double" ? "true" : "", + "BytesRef" : type == "BytesRef" ? "true" : "", + "boolean" : type == "boolean" ? "true" : "", + ] +} + +tasks.named('stringTemplates').configure { + var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") + var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") + var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + // enrich + File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st") + template { + it.properties = intProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 43181a344e268..f365a2ed78610 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(BooleanBlock block); - @Override BooleanBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 09c436e805d57..32627a0e0d36b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -71,55 +71,6 @@ public BooleanBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BooleanBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(BooleanBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final BooleanVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBoolean(vector.getBoolean(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBoolean(block.getBoolean(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BooleanBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 5f5e1f9caa488..a6c75dbc1122f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -229,19 +229,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder perm @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(BytesRefBlock block); - @Override BytesRefBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index aed422b0c0104..4ef7ed4084228 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -78,56 +78,6 @@ protected void writeNullValue() { values.append(BytesRefBlock.NULL_VALUE); } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BytesRefBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(BytesRefBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - BytesRef scratch = new BytesRef(); - final BytesRefVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBytesRef(vector.getBytesRef(p, scratch)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBytesRef(block.getBytesRef(i++, scratch)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BytesRefBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 27d70caaa18fe..a682c2cba019e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permit @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(DoubleBlock block); - @Override DoubleBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 427127784869a..5921c2daa9f92 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -71,55 +71,6 @@ public DoubleBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((DoubleBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(DoubleBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final DoubleVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendDouble(vector.getDouble(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendDouble(block.getDouble(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public DoubleBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index a34d50bf6ff55..e9d606b51c6a1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits I @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(IntBlock block); - @Override IntBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index aaf46798fd789..85f943004de29 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -71,55 +71,6 @@ public IntBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((IntBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(IntBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final IntVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendInt(vector.getInt(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendInt(block.getInt(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public IntBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 21c3eb4257b8d..3e1c5fcfaac95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(LongBlock block); - @Override LongBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index 5d8daf306809d..d24ae214da63a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -71,55 +71,6 @@ public LongBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((LongBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(LongBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final LongVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendLong(vector.getLong(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendLong(block.getLong(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public LongBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 7fc92da1943ac..0e34eaa68881f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -183,12 +183,6 @@ interface Builder extends BlockLoader.Builder, Releasable { */ Builder endPositionEntry(); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(Block block); - /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index c2ac99a7c8489..3df75f4bc1c56 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -172,11 +172,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - return appendNull(); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 8d3497a66a2d7..2751cd31fd362 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -149,11 +149,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException("DocBlock doesn't support appendBlockAndMerge"); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java index 8616d7a7e1bc6..fd9dd6a479298 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -165,11 +165,6 @@ public void close() { blockFactory.adjustBreaker(-ordsSize(ords.length)); } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException(); - } - @Override public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 3850e3da7c796..331a5713fa3d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -277,19 +277,6 @@ $endif$ @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition($Type$Block block); - @Override $Type$Block build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 5b432f1c62968..fab3be0be4233 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -113,66 +113,6 @@ $if(BytesRef)$ } $endif$ - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public $Type$BlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition(($Type$Block) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public $Type$BlockBuilder appendAllValuesToCurrentPosition($Type$Block block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } -$if(BytesRef)$ - BytesRef scratch = new BytesRef(); -$endif$ - final $Type$Vector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { -$if(BytesRef)$ - appendBytesRef(vector.getBytesRef(p, scratch)); -$else$ - append$Type$(vector.get$Type$(p)); -$endif$ - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { -$if(BytesRef)$ - appendBytesRef(block.getBytesRef(i++, scratch)); -$else$ - append$Type$(block.get$Type$(i++)); -$endif$ - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public $Type$BlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java deleted file mode 100644 index 9c1b02aa74107..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.compute.operator.ComputeTestCase; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class BlockBuilderAppendBlockTests extends ComputeTestCase { - - public void testBasic() { - BlockFactory blockFactory = blockFactory(); - IntBlock src = blockFactory.newIntBlockBuilder(10) - .appendInt(1) - .appendNull() - .beginPositionEntry() - .appendInt(4) - .appendInt(6) - .endPositionEntry() - .appendInt(10) - .appendInt(20) - .appendInt(30) - .appendNull() - .beginPositionEntry() - .appendInt(1) - .endPositionEntry() - .build(); - // copy position by position - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - for (int i = 0; i < src.getPositionCount(); i++) { - try (IntBlock filter = src.filter(i)) { - dst.appendAllValuesToCurrentPosition(filter); - } - } - try (IntBlock block = dst.build()) { - assertThat(block, equalTo(src)); - } - } - // copy all block - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - try (Block dst = randomlyDivideAndMerge(src)) { - assertThat(dst.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - - public void testRandomNullBlock() { - BlockFactory blockFactory = blockFactory(); - IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - src.appendInt(101); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - IntBlock block = src.build(); - assertThat(block.getPositionCount(), equalTo(3)); - assertTrue(block.isNull(0)); - assertThat(block.getInt(1), equalTo(101)); - assertTrue(block.isNull(2)); - try (Block flatten = randomlyDivideAndMerge(block)) { - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); - } - } - - public void testRandom() { - ElementType elementType = randomFrom(ElementType.INT, ElementType.BYTES_REF, ElementType.BOOLEAN); - Block block = BasicBlockTests.randomBlock( - elementType, - randomIntBetween(1, 1024), - randomBoolean(), - 0, - between(1, 16), - 0, - between(0, 16) - ).block(); - - block = randomlyDivideAndMerge(block); - block.close(); - } - - private Block randomlyDivideAndMerge(Block block) { - while (block.getPositionCount() > 1 || randomBoolean()) { - int positionCount = block.getPositionCount(); - int offset = 0; - Block.Builder builder = block.elementType() - .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance()); - List expected = new ArrayList<>(); - while (offset < positionCount) { - int length = randomIntBetween(1, positionCount - offset); - int[] positions = new int[length]; - for (int i = 0; i < length; i++) { - positions[i] = offset + i; - } - offset += length; - Block sub = block.filter(positions); - expected.add(extractAndFlattenBlockValues(sub)); - builder.appendAllValuesToCurrentPosition(sub); - sub.close(); - } - block.close(); - block = builder.build(); - assertThat(block.getPositionCount(), 
equalTo(expected.size())); - for (int i = 0; i < block.getPositionCount(); i++) { - assertThat(BlockUtils.toJavaObject(block, i), equalTo(expected.get(i))); - } - } - return block; - } - - static Object extractAndFlattenBlockValues(Block block) { - List values = new ArrayList<>(); - for (int i = 0; i < block.getPositionCount(); i++) { - Object v = BlockUtils.toJavaObject(block, i); - if (v == null) { - continue; - } - if (v instanceof List l) { - values.addAll(l); - } else { - values.add(v); - } - } - if (values.isEmpty()) { - return null; - } else if (values.size() == 1) { - return values.get(0); - } else { - return values; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index a2b074c1403a0..4595b26ca27aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -113,12 +113,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public IntBlock build() { return builder.build(); @@ -174,12 +168,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public LongBlock build() { return builder.build(); @@ -235,12 +223,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public DoubleBlock build() { return builder.build(); @@ -296,12 +278,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BytesRefBlock build() { return builder.build(); @@ -360,12 +336,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BooleanBlock build() { return builder.build(); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java new file mode 100644 index 0000000000000..0427afb6d80c8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Booleans. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBoolean extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + BooleanBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getBoolean(firstValueIndex + v); + } + } + } + + private boolean[] extendCell(boolean[] oldCell, int newValueCount) { + if (oldCell == null) { + return new boolean[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBoolean(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java new file mode 100644 index 0000000000000..ff881da5baf44 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for BytesRefs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBytesRef extends EnrichResultBuilder { + private final BytesRefArray bytes; // shared between all cells + private ObjectArray cells; + + EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } + } + + @Override + void addInputPage(IntVector positions, Page page) { + BytesRefBlock block = page.getBlock(channel); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(totalPositions)) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBytesRef(bytes.get(v, scratch)); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(bytes, cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java new file mode 100644 index 0000000000000..93c178d816326 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Doubles. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForDouble extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + DoubleBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getDouble(firstValueIndex + v); + } + } + } + + private double[] extendCell(double[] oldCell, int newValueCount) { + if (oldCell == null) { + return new double[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendDouble(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java new file mode 100644 index 0000000000000..4dec877e0d1e4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Ints. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForInt extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForInt(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + IntBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getInt(firstValueIndex + v); + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendInt(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java new file mode 100644 index 0000000000000..0dd4d1d0a8a0d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Longs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForLong extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForLong(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + LongBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getLong(firstValueIndex + v); + } + } + } + + private long[] extendCell(long[] oldCell, int newValueCount) { + if (oldCell == null) { + return new long[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendLong(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 77120c757e97a..e5d4e58d9d61b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -37,7 +37,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; -import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; @@ -318,22 +317,10 @@ private void doLookup( 0 ) ); - - // drop docs block - intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); - boolean singleLeaf = searchContext.searcher().getLeafContexts().size() == 1; - // merging field-values by position - final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); + final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); intermediateOperators.add( - new MergePositionsOperator( - singleLeaf, - inputPage.getPositionCount(), - 0, - mergingChannels, - mergingTypes, - driverContext.blockFactory() - ) + new MergePositionsOperator(inputPage.getPositionCount(), 1, mergingChannels, mergingTypes, driverContext.blockFactory()) ); AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); @@ -392,17 +379,6 @@ private Page createNullResponse(int positionCount, List extract } } - private static Operator droppingBlockOperator(int totalBlocks, int droppingPosition) { - var size = totalBlocks - 1; - var projection = new ArrayList(size); - for (int i = 0; i < totalBlocks; i++) { - if (i != droppingPosition) { - projection.add(i); - } - } - return new ProjectOperator(projection); - } - private class TransportHandler implements TransportRequestHandler { @Override public void messageReceived(LookupRequest request, TransportChannel channel, Task task) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java new file mode 100644 index 
0000000000000..5bb42f3090695 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; + +/** + * An abstract class responsible for collecting values for an output block of enrich. + * The incoming values of the same position are combined and added to a single corresponding position. + */ +abstract class EnrichResultBuilder implements Releasable { + protected final BlockFactory blockFactory; + protected final int channel; + protected final int totalPositions; + private long usedBytes; + + EnrichResultBuilder(BlockFactory blockFactory, int channel, int totalPositions) { + this.blockFactory = blockFactory; + this.channel = channel; + this.totalPositions = totalPositions; + } + + /** + * Collects the input values from the input page. + * + * @param positions the positions vector + * @param page the input page. The block located at {@code channel} is the value block + */ + abstract void addInputPage(IntVector positions, Page page); + + abstract Block build(); + + final void adjustBreaker(long bytes) { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "<>"); + usedBytes += bytes; + } + + @Override + public void close() { + blockFactory.breaker().addWithoutBreaking(-usedBytes); + } + + static EnrichResultBuilder enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel, int totalPositions) { + return switch (elementType) { + case NULL -> new EnrichResultBuilderForNull(blockFactory, channel, totalPositions); + case INT -> new EnrichResultBuilderForInt(blockFactory, channel, totalPositions); + case LONG -> new EnrichResultBuilderForLong(blockFactory, channel, totalPositions); + case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel, totalPositions); + case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel, totalPositions); + case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel, totalPositions); + default -> throw new IllegalArgumentException("no enrich result builder for [" + elementType + "]"); + }; + } + + private static class EnrichResultBuilderForNull extends EnrichResultBuilder { + EnrichResultBuilderForNull(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + assert page.getBlock(channel).areAllValuesNull() : "expected all nulls; but got values"; + } + + @Override + Block build() { + return blockFactory.newConstantNullBlock(totalPositions); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index 89447807db5b9..a3b7a8be61e2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.util.Arrays; +import java.util.Objects; /** * Combines values at the given blocks with the same positions into a single position for the blocks at the given channels @@ -44,21 +45,13 @@ */ final class MergePositionsOperator implements Operator { private boolean finished = false; - private int filledPositions = 0; - private final boolean singleMode; - private final int positionCount; private final int positionChannel; - private final Block.Builder[] outputBuilders; - private final int[] mergingChannels; - private final ElementType[] mergingTypes; - private PositionBuilder positionBuilder = null; + private final EnrichResultBuilder[] builders; private Page outputPage; - private final BlockFactory blockFactory; MergePositionsOperator( - boolean singleMode, int positionCount, int positionChannel, int[] mergingChannels, @@ -73,123 +66,51 @@ final class MergePositionsOperator implements Operator { + Arrays.toString(mergingTypes) ); } - this.blockFactory = blockFactory; - this.singleMode = singleMode; - this.positionCount = positionCount; this.positionChannel = positionChannel; - this.mergingChannels = mergingChannels; - this.mergingTypes = mergingTypes; - this.outputBuilders = new Block.Builder[mergingTypes.length]; + this.builders = new EnrichResultBuilder[mergingTypes.length]; try { for (int i = 0; i < mergingTypes.length; i++) { - outputBuilders[i] = mergingTypes[i].newBlockBuilder(positionCount, blockFactory); + builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i], positionCount); } } finally { - if (outputBuilders[outputBuilders.length - 1] == null) { - Releasables.close(outputBuilders); + if (builders[builders.length - 1] == null) { + Releasables.close(builders); } } } @Override public boolean needsInput() { - return true; + return finished == false; } @Override public void addInput(Page page) { try { final IntBlock positions = page.getBlock(positionChannel); - final int currentPosition = positions.getInt(0); - if (singleMode) { - fillNullUpToPosition(currentPosition); - for (int i = 0; i < mergingChannels.length; i++) { - int channel = mergingChannels[i]; - outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); - } - filledPositions++; - } else { - if (positionBuilder != null && positionBuilder.position != currentPosition) { - flushPositionBuilder(); - } - if (positionBuilder == null) { - positionBuilder = new PositionBuilder(currentPosition, mergingTypes, blockFactory); - } - positionBuilder.combine(page, mergingChannels); + final IntVector positionsVector = Objects.requireNonNull(positions.asVector(), "positions must be a vector"); + for (EnrichResultBuilder builder : builders) { + builder.addInputPage(positionsVector, page); } } finally { Releasables.closeExpectNoException(page::releaseBlocks); } } - static final class PositionBuilder implements Releasable { - private final int position; - private final Block.Builder[] builders; - - PositionBuilder(int position, ElementType[] elementTypes, BlockFactory 
blockFactory) { - this.position = position; - this.builders = new Block.Builder[elementTypes.length]; - try { - for (int i = 0; i < builders.length; i++) { - builders[i] = elementTypes[i].newBlockBuilder(1, blockFactory); - } - } finally { - if (builders[builders.length - 1] == null) { - Releasables.close(builders); - } - } - } - - void combine(Page page, int[] channels) { - for (int i = 0; i < channels.length; i++) { - Block block = page.getBlock(channels[i]); - builders[i].appendAllValuesToCurrentPosition(block); - } - } - - void buildTo(Block.Builder[] output) { - for (int i = 0; i < output.length; i++) { - try (var b = builders[i]; Block block = b.build()) { - output[i].appendAllValuesToCurrentPosition(block); - } + @Override + public void finish() { + final Block[] blocks = new Block[builders.length]; + try { + for (int i = 0; i < builders.length; i++) { + blocks[i] = builders[i].build(); } - } - - @Override - public void close() { - Releasables.close(builders); - } - } - - private void flushPositionBuilder() { - fillNullUpToPosition(positionBuilder.position); - filledPositions++; - try (var p = positionBuilder) { - p.buildTo(outputBuilders); + outputPage = new Page(blocks); } finally { - positionBuilder = null; - } - } - - private void fillNullUpToPosition(int position) { - while (filledPositions < position) { - for (Block.Builder builder : outputBuilders) { - builder.appendNull(); + finished = true; + if (outputPage == null) { + Releasables.close(blocks); } - filledPositions++; - } - } - - @Override - public void finish() { - if (positionBuilder != null) { - flushPositionBuilder(); } - fillNullUpToPosition(positionCount); - final Block[] blocks = Block.Builder.buildAll(outputBuilders); - outputPage = new Page(blocks); - assert outputPage.getPositionCount() == positionCount; - finished = true; } @Override @@ -206,7 +127,7 @@ public Page getOutput() { @Override public void close() { - Releasables.close(Releasables.wrap(outputBuilders), positionBuilder, () -> { + Releasables.close(Releasables.wrap(builders), () -> { if (outputPage != null) { outputPage.releaseBlocks(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st new file mode 100644 index 0000000000000..4c5c9fabfa797 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +$else$ +import org.apache.lucene.util.RamUsageEstimator; +$endif$ +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(long)$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.$Type$Block; +$else$ +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntVector; +$endif$ +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for $Type$s. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderFor$Type$ extends EnrichResultBuilder { +$if(BytesRef)$ + private final BytesRefArray bytes; // shared between all cells +$endif$ + private ObjectArray<$if(BytesRef)$int$else$$type$$endif$[]> cells; + + EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); +$if(BytesRef)$ + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } +$endif$ + } + + @Override + void addInputPage(IntVector positions, Page page) { + $Type$Block block = page.getBlock(channel); +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); +$if(BytesRef)$ + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } +$else$ + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.get$Type$(firstValueIndex + v); + } +$endif$ + } + } + + private $if(BytesRef)$int$else$$type$$endif$[] extendCell($if(BytesRef)$int$else$$type$$endif$[] oldCell, int newValueCount) { + if (oldCell == null) { + return new $if(BytesRef)$int$else$$type$$endif$[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(totalPositions)) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { +$if(BytesRef)$ + builder.appendBytesRef(bytes.get(v, scratch)); +$else$ + builder.append$Type$(v); +$endif$ + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close($if(BytesRef)$bytes, $endif$cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java new file mode 100644 index 0000000000000..f6e8b9107504c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.enrich;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BytesRefBlock;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class EnrichResultBuilderTests extends ESTestCase {
+
+    public void testBytesRef() {
+        BlockFactory blockFactory = blockFactory();
+        Map<Integer, List<BytesRef>> expectedValues = new HashMap<>();
+        int numPages = between(0, 10);
+        int maxPosition = between(0, 100);
+        var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0, maxPosition + 1);
+        for (int i = 0; i < numPages; i++) {
+            int numRows = between(1, 100);
+            try (
+                var positionsBuilder = blockFactory.newIntVectorBuilder(numRows);
+                var valuesBuilder = blockFactory.newBytesRefBlockBuilder(numRows)
+            ) {
+                for (int r = 0; r < numRows; r++) {
+                    int position = between(0, maxPosition);
+                    positionsBuilder.appendInt(position);
+                    int numValues = between(0, 3);
+                    if (numValues == 0) {
+                        valuesBuilder.appendNull();
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.beginPositionEntry();
+                    }
+                    for (int v = 0; v < numValues; v++) {
+                        BytesRef val = new BytesRef(randomByteArrayOfLength(10));
+                        expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val);
+                        valuesBuilder.appendBytesRef(val);
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.endPositionEntry();
+                    }
+                }
+                try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) {
+                    resultBuilder.addInputPage(positions, new Page(valuesBlock));
+                }
+            }
+        }
+        try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build()) {
+            assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1));
+            for (int i = 0; i < actualOutput.getPositionCount(); i++) {
+                List<BytesRef> values = expectedValues.get(i);
+                if (actualOutput.isNull(i)) {
+                    assertNull(values);
+                } else {
+                    int valueCount = actualOutput.getValueCount(i);
+                    int first = actualOutput.getFirstValueIndex(i);
+                    assertThat(valueCount, equalTo(values.size()));
+                    for (int v = 0; v < valueCount; v++) {
+                        assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v)));
+                    }
+                }
+            }
+        }
+        resultBuilder.close();
+        assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
+    }
+
+    public void testLong() {
+        BlockFactory blockFactory = blockFactory();
+        Map<Integer, List<Long>> expectedValues = new HashMap<>();
+        int numPages = between(0, 10);
+        int maxPosition = between(0, 100);
+        var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0, maxPosition + 1);
+        for (int i = 0; i < numPages; i++) {
+            int numRows = between(1, 100);
+            try (
+                var positionsBuilder = blockFactory.newIntVectorBuilder(numRows);
+                var valuesBuilder = blockFactory.newLongBlockBuilder(numRows)
+            ) {
+                for (int r = 0; r < numRows; r++) {
+                    int position = between(0, maxPosition);
+                    positionsBuilder.appendInt(position);
+                    int numValues = between(0, 3);
+                    if (numValues == 0) {
+                        valuesBuilder.appendNull();
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.beginPositionEntry();
+                    }
+                    for (int v = 0; v < numValues; v++) {
+                        long val = randomLong();
+                        expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val);
+                        valuesBuilder.appendLong(val);
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.endPositionEntry();
+                    }
+                }
+                try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) {
+                    resultBuilder.addInputPage(positions, new Page(valuesBlock));
+                }
+            }
+        }
+        try (LongBlock actualOutput = (LongBlock) resultBuilder.build()) {
+            assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1));
+            for (int i = 0; i < actualOutput.getPositionCount(); i++) {
+                List<Long> values = expectedValues.get(i);
+                if (actualOutput.isNull(i)) {
+                    assertNull(values);
+                } else {
+                    int valueCount = actualOutput.getValueCount(i);
+                    int first = actualOutput.getFirstValueIndex(i);
+                    assertThat(valueCount, equalTo(values.size()));
+                    for (int v = 0; v < valueCount; v++) {
+                        assertThat(actualOutput.getLong(first + v), equalTo(values.get(v)));
+                    }
+                }
+            }
+        }
+        resultBuilder.close();
+        assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
+    }
+
+    BlockFactory blockFactory() {
+        var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(100)).withCircuitBreaking();
+        CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
+        return new BlockFactory(breaker, bigArrays);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
index 80d127fc81907..09bc36a5390af 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
@@ -32,7 +32,6 @@ public void testSimple() throws Exception {
         CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
         BlockFactory blockFactory = new BlockFactory(breaker, bigArrays);
         MergePositionsOperator mergeOperator = new MergePositionsOperator(
-            randomBoolean(),
             7,
             0,
             new int[] { 1, 2 },

From 75228dfd4517a28ef7b72461117f440e440ff37c Mon Sep 17 00:00:00 2001
From: Parker Timmins
Date: Tue, 9 Apr 2024 11:11:49 -0600
Subject: [PATCH 203/264] Add granular error list to alias action response
 (#106514)

When an alias action list is posted with must_exist==false, and succeeds
only partially, a list of results for each action is now returned. The
results contain information about the requested action, indices, and
aliases. If must_exist==true, or all actions fail, the call will return
a 400 status along with the associated exception.
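For illustration only, not part of this change: a minimal sketch of how a
caller of the internal Java client could consume the new per-action results.
It relies only on the hasErrors() and getActionResults() accessors introduced
below; the Client instance and the example class and method names are
hypothetical.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
    import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
    import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
    import org.elasticsearch.client.internal.Client;

    class AliasUpdateExample {
        // Sketch: submit two alias actions and inspect the per-action results.
        static void updateAliases(Client client) {
            IndicesAliasesRequest request = new IndicesAliasesRequest();
            request.addAliasAction(AliasActions.add().index("index2").alias("logs-non-existing"));
            request.addAliasAction(AliasActions.remove().index("index1").alias("logs-non-existing"));
            client.admin().indices().aliases(request, ActionListener.wrap(response -> {
                // With must_exist == false a partially failed action list is still
                // acknowledged; the new errors flag and result list expose what failed.
                if (response.hasErrors()) {
                    for (IndicesAliasesResponse.AliasActionResult result : response.getActionResults()) {
                        // One result per requested action; its type, indices, aliases,
                        // status, and error are rendered in the REST response.
                    }
                }
            }, e -> {
                // With must_exist == true, or when every action fails, the request
                // fails as a whole and surfaces as a 400 over REST.
            }));
        }
    }

Over REST the same information is rendered as the errors flag and the
action_results list shown in the documentation changes below.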
---
 docs/changelog/106514.yaml                    |   6 +
 docs/reference/alias.asciidoc                 |  71 +++++
 docs/reference/indices/aliases.asciidoc       |  58 ++++-
 .../data_stream/140_data_stream_aliases.yml   |  83 ++++++
 .../indices.update_aliases/40_must_exist.yml  |  97 +++++++
 .../org/elasticsearch/TransportVersions.java  |   1 +
 ...dicesAliasesClusterStateUpdateRequest.java |  10 +-
 .../indices/alias/IndicesAliasesRequest.java  |   5 +-
 .../alias/IndicesAliasesRequestBuilder.java   |   3 +-
 .../indices/alias/IndicesAliasesResponse.java | 245 ++++++++++++++++++
 .../alias/TransportIndicesAliasesAction.java  |  32 ++-
 .../client/internal/IndicesAdminClient.java   |   5 +-
 .../internal/support/AbstractClient.java      |   5 +-
 .../cluster/metadata/AliasAction.java         |   4 +-
 .../metadata/MetadataIndexAliasesService.java |  15 +-
 .../alias/IndicesAliasesResponseTests.java    | 108 ++++++++
 .../MetadataIndexAliasesServiceTests.java     |  16 +-
 .../core/ml/annotations/AnnotationIndex.java  |   6 +-
 .../xpack/core/ml/utils/MlIndexAndAlias.java  |   3 +-
 .../core/ilm/ShrinkSetAliasStepTests.java     |   8 +-
 .../core/ml/utils/MlIndexAndAliasTests.java   |  11 +-
 .../search/SearchApplicationIndexService.java |  11 +-
 .../xpack/ml/MlInitializationService.java     |   5 +-
 .../ml/job/persistence/JobDataDeleter.java    |   9 +-
 .../job/persistence/JobResultsProvider.java   |   3 +-
 .../TransformClusterStateListener.java        |   4 +-
 26 files changed, 766 insertions(+), 58 deletions(-)
 create mode 100644 docs/changelog/106514.yaml
 create mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java
 create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java

diff --git a/docs/changelog/106514.yaml b/docs/changelog/106514.yaml
new file mode 100644
index 0000000000000..5b25f40db2742
--- /dev/null
+++ b/docs/changelog/106514.yaml
@@ -0,0 +1,6 @@
+pr: 106514
+summary: Add granular error list to alias action response
+area: Indices APIs
+type: feature
+issues:
+ - 94478
diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc
index 6ddd3602e1467..5b30501ed7c9d 100644
--- a/docs/reference/alias.asciidoc
+++ b/docs/reference/alias.asciidoc
@@ -121,6 +121,77 @@ POST _aliases
 // TEST[s/^/PUT _data_stream\/logs-nginx.access-prod\nPUT _data_stream\/logs-my_app-default\n/]
 // end::alias-multiple-actions-example[]
 
+[discrete]
+[[multiple-action-results]]
+=== Multiple action results
+
+When using multiple actions, if some succeed and some fail, a list of per-action results is returned.
+
+Consider an action list similar to the previous example, but now with an alias `logs-non-existing`, which does not yet exist.
+In this case, the `remove` action will fail, but the `add` action will succeed.
+The response will contain the list `action_results`, with a result for every requested action.
+
+[source,console]
+----
+POST _aliases
+{
+  "actions": [
+    {
+      "remove": {
+        "index": "index1",
+        "alias": "logs-non-existing"
+      }
+    },
+    {
+      "add": {
+        "index": "index2",
+        "alias": "logs-non-existing"
+      }
+    }
+  ]
+}
+----
+// TEST[s/^/PUT \/index1\nPUT \/index2\n/]
+
+The API returns the following result:
+
+[source,console-result]
+--------------------------------------------------
+{
+  "acknowledged": true,
+  "errors": true,
+  "action_results": [
+    {
+      "action": {
+        "type": "remove",
+        "indices": [ "index1" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 404,
+      "error": {
+        "type": "aliases_not_found_exception",
+        "reason": "aliases [logs-non-existing] missing",
+        "resource.type": "aliases",
+        "resource.id": "logs-non-existing"
+      }
+    },
+    {
+      "action": {
+        "type": "add",
+        "indices": [ "index2" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 200
+    }
+  ]
+}
+--------------------------------------------------
+
+Allowing the action list to succeed partially may not provide the desired result.
+It may be more appropriate to set `must_exist` to `true`, which will cause the entire action
+list to fail if a single action fails.
+
+
 [discrete]
 [[add-alias-at-creation]]
 === Add an alias at index creation
diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc
index 76698501fd416..34248cc5f98d3 100644
--- a/docs/reference/indices/aliases.asciidoc
+++ b/docs/reference/indices/aliases.asciidoc
@@ -145,10 +145,16 @@ the alias points to one data stream.
 +
 Only the `add` action supports this parameter.
 
+// tag::alias-options[]
 `must_exist`::
 (Optional, Boolean)
-If `true`, the alias must exist to perform the action. Defaults to `false`. Only
-the `remove` action supports this parameter.
+Affects the behavior when attempting to remove an alias which does not exist.
+If `true`, removing an alias which does not exist will cause all actions to fail.
+If `false`, removing an alias which does not exist will only cause that removal to fail.
+Defaults to `false`.
+// end::alias-options[]
++
+Only the `remove` action supports this parameter.
 
 // tag::alias-options[]
 `routing`::
@@ -168,3 +174,51 @@ stream aliases don't support this parameter. Only the `add` action supports
 this parameter.
 =====
 ====
+
+
+
+[role="child_attributes"]
+[[indices-aliases-api-response-body]]
+==== {api-response-body-title}
+
+`acknowledged`::
+(Boolean)
+If `true`, the request received a response from the master node within the
+`timeout` period.
+
+`errors`::
+(Boolean)
+If `true`, at least one of the requested actions failed.
+
+`action_results`::
+(Optional, array of objects) Results for each requested action.
++
+.Properties of `action_results` objects
+[%collapsible%open]
+====
+
+`action`::
+(object)
+Description of the associated action request.
++
+.Properties of `action` object
+[%collapsible%open]
+=====
+`type`::
+(string) The type of the associated action, one of `add`, `remove`, or `remove_index`.
+
+`indices`::
+(array of strings) List of indices in the associated action.
+
+`aliases`::
+(array of strings) List of aliases in the associated action.
+=====
+
+`status`::
+(integer) HTTP status code returned for the action.
+
+`error`::
+(Optional, object) Contains additional information about the failed action.
++
+Only present if the action failed.
+==== diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index 70c563d1d4510..1050d6e01a95f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -307,3 +307,86 @@ indices.get_alias: name: this-does-not-exist* - is_false: ds-first.aliases.my-alias +--- +"Action Results with multiple matching aliases": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-foobar + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-foobar + aliases: test_alias1 + - remove: + index: log-foobar + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200 } + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-foobar'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['log-foobar'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"Single action result per action": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-test-1 + - do: + indices.create_data_stream: + name: log-test-2 + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-test-* + aliases: test_alias1 + - remove: + index: log-test-* + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml index dbe167608e576..fa3c740612872 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml @@ -82,3 +82,100 @@ - remove_index: index: test_index must_exist: true +--- +"Partial success with must_exist == false": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - remove: + index: test_index + aliases: test_non_existing + must_exist: false + - is_true: errors + - match: { action_results.0.status: 200 } + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"Partial success with must_exist == null (default)": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - remove: + index: test_index + aliases: test_non_existing + - is_true: errors + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404} + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"No action_results field if all actions successful": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - is_false: errors + - match: { action_results: null } +--- +"Single result per input action": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index1 + - do: + indices.create: + index: test_index2 + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index* + aliases: test_alias1 + - remove: + index: test_index* + aliases: test_non_existing + - length: { action_results: 2 } + - is_true: errors + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404} + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4a1bf691ea1b0..e05487c9c88fe 100644 --- 
a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -164,6 +164,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); + public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java index b52098a49c002..1f87cf618dfcf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.action.admin.indices.alias; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; import org.elasticsearch.cluster.metadata.AliasAction; @@ -18,8 +19,11 @@ public class IndicesAliasesClusterStateUpdateRequest extends ClusterStateUpdateRequest { private final List actions; - public IndicesAliasesClusterStateUpdateRequest(List actions) { + private final List actionResults; + + public IndicesAliasesClusterStateUpdateRequest(List actions, List actionResults) { this.actions = actions; + this.actionResults = actionResults; } /** @@ -28,4 +32,8 @@ public IndicesAliasesClusterStateUpdateRequest(List actions) { public List actions() { return actions; } + + public List getActionResults() { + return actionResults; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index a4f5ee9eb672b..fac2006b68814 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -83,7 +83,6 @@ public static class AliasActions implements AliasesRequest, Writeable, ToXConten private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index"); private static final ParseField IS_HIDDEN = new ParseField("is_hidden"); private static final ParseField MUST_EXIST = new ParseField("must_exist"); - private static final ParseField ADD = new ParseField("add"); private static final ParseField REMOVE = new ParseField("remove"); private static final ParseField REMOVE_INDEX = new ParseField("remove_index"); @@ -105,6 +104,10 @@ public byte value() { return value; } + public String getFieldName() { + return fieldName; + } + public static Type fromValue(byte value) { return switch (value) { case 0 -> ADD; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java index 4e49a5fe8d400..1462e36ea7895 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.index.query.QueryBuilder; @@ -21,7 +20,7 @@ */ public class IndicesAliasesRequestBuilder extends AcknowledgedRequestBuilder< IndicesAliasesRequest, - AcknowledgedResponse, + IndicesAliasesResponse, IndicesAliasesRequestBuilder> { public IndicesAliasesRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java new file mode 100644 index 0000000000000..b4f483e6f8161 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Response with error information for a request to add/remove aliases for one or more indices. + * Contains an acknowledged boolean, an errors boolean, and a list of results. + * The result list is only present if there are errors, and contains a result for every input action. + * This response replaces AcknowledgedResponse, and knows how to de/serialize from/to AcknowledgedResponse + * in case of mixed version clusters. 
+ */
+public class IndicesAliasesResponse extends AcknowledgedResponse {
+
+    // Response without any error information, analogous to AcknowledgedResponse.FALSE
+    public static final IndicesAliasesResponse NOT_ACKNOWLEDGED = new IndicesAliasesResponse(false, false, List.of());
+
+    // Response without any error information, analogous to AcknowledgedResponse.TRUE
+    public static final IndicesAliasesResponse ACKNOWLEDGED_NO_ERRORS = new IndicesAliasesResponse(true, false, List.of());
+
+    private static final String ACTION_RESULTS_FIELD = "action_results";
+    private static final String ERRORS_FIELD = "errors";
+
+    private final List<AliasActionResult> actionResults;
+    private final boolean errors;
+
+    protected IndicesAliasesResponse(StreamInput in) throws IOException {
+        super(in);
+
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            this.errors = in.readBoolean();
+            this.actionResults = in.readCollectionAsImmutableList(AliasActionResult::new);
+        } else {
+            this.errors = false;
+            this.actionResults = List.of();
+        }
+    }
+
+    /**
+     * @param acknowledged whether the update was acknowledged by all the relevant nodes in the cluster
+     * @param errors true if any of the requested actions failed
+     * @param actionResults the list of results for each input action, only present if there are errors
+     */
+    IndicesAliasesResponse(boolean acknowledged, boolean errors, final List<AliasActionResult> actionResults) {
+        super(acknowledged);
+        this.errors = errors;
+        this.actionResults = actionResults;
+    }
+
+    public List<AliasActionResult> getActionResults() {
+        return actionResults;
+    }
+
+    public boolean hasErrors() {
+        return errors;
+    }
+
+    /**
+     * Build a response from a list of action results. Sets the errors boolean based
+     * on whether any of the individual results contains an error.
+     * @param actionResults an action result for each of the requested alias actions
+     * @return response containing all action results
+     */
+    public static IndicesAliasesResponse build(final List<AliasActionResult> actionResults) {
+        assert actionResults.isEmpty() == false : "IndicesAliasesResponse must be instantiated with at least one action result.";
+        final boolean errors = actionResults.stream().anyMatch(a -> a.error != null);
+        return new IndicesAliasesResponse(true, errors, actionResults);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            out.writeBoolean(errors);
+            out.writeCollection(actionResults);
+        }
+    }
+
+    @Override
+    protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
+        builder.field(ERRORS_FIELD, errors);
+        // if there are no errors, don't provide a granular list of results
+        if (errors) {
+            builder.field(ACTION_RESULTS_FIELD, actionResults);
+        }
+    }
+
+    @Override
+    // Only used equals in tests
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        if (super.equals(o) == false) return false;
+        IndicesAliasesResponse response = (IndicesAliasesResponse) o;
+        return errors == response.errors && Objects.equals(actionResults, response.actionResults);
+    }
+
+    @Override
+    // Only used hashCode in tests
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), actionResults, errors);
+    }
+
+    /**
+     * Result for a single alias add/remove action
+     */
+    public static class AliasActionResult implements Writeable, ToXContentObject {
+
+        /**
+         * Resolved indices to which the action applies. This duplicates information
+         * which exists in the action, but is included because the action indices may
+         * or may not be resolved, depending on whether the security layer is used.
+         */
+        private final List<String> indices;
+        private final AliasActions action;
+        private final ElasticsearchException error;
+
+        /**
+         * Build a result that is either a success or a failure.
+         * @param indices the resolved indices to which the associated action applies
+         * @param action the alias action consisting of add/remove, aliases, and indices
+         * @param numAliasesRemoved the number of aliases removed, if any
+         * @return the action result
+         */
+        public static AliasActionResult build(List<String> indices, AliasActions action, int numAliasesRemoved) {
+            if (action.actionType() == AliasActions.Type.REMOVE && numAliasesRemoved == 0) {
+                return buildRemoveError(indices, action);
+            }
+            return buildSuccess(indices, action);
+        }
+
+        /**
+         * Build an error result for a failed remove action.
+         */
+        private static AliasActionResult buildRemoveError(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, new AliasesNotFoundException(action.getOriginalAliases()));
+        }
+
+        /**
+         * Build a success action result with no errors.
+         */
+        public static AliasActionResult buildSuccess(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, null);
+        }
+
+        private int getStatus() {
+            return error == null ? 200 : error.status().getStatus();
+        }
+
+        private AliasActionResult(List<String> indices, AliasActions action, ElasticsearchException error) {
+            assert indices.isEmpty() == false : "Alias action result must be instantiated with at least one index";
+            this.indices = indices;
+            this.action = action;
+            this.error = error;
+        }
+
+        private AliasActionResult(StreamInput in) throws IOException {
+            this.indices = in.readStringCollectionAsList();
+            this.action = new AliasActions(in);
+            this.error = in.readException();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeStringCollection(indices);
+            action.writeTo(out);
+            out.writeException(error);
+        }
+
+        public static final String ACTION_FIELD = "action";
+        public static final String ACTION_TYPE_FIELD = "type";
+        public static final String ACTION_INDICES_FIELD = "indices";
+        public static final String ACTION_ALIASES_FIELD = "aliases";
+        public static final String STATUS_FIELD = "status";
+        public static final String ERROR_FIELD = "error";
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+
+            // include a subset of fields from the action request
+            builder.field(ACTION_FIELD);
+            builder.startObject();
+            builder.field(ACTION_TYPE_FIELD, action.actionType().getFieldName());
+            builder.field(ACTION_INDICES_FIELD, indices.stream().sorted().collect(Collectors.toList()));
+            builder.array(ACTION_ALIASES_FIELD, action.getOriginalAliases());
+            builder.endObject();
+
+            builder.field(STATUS_FIELD, getStatus());
+
+            if (error != null) {
+                builder.startObject(ERROR_FIELD);
+                error.toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        // Only used equals in tests
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            AliasActionResult that = (AliasActionResult) o;
+            return Objects.equals(indices, that.indices) && Objects.equals(action, that.action)
+                // ElasticsearchException does not have hashCode() so assume
errors are equal iff class and message are equal + && Objects.equals(error == null ? null : error.getMessage(), that.error == null ? null : that.error.getMessage()) + && Objects.equals(error == null ? null : error.getClass(), that.error == null ? null : that.error.getClass()); + } + + @Override + // Only used hashCode in tests + public int hashCode() { + return Objects.hash( + indices, + action, + // ElasticsearchException does not have hashCode() so assume errors are equal iff class and message are equal + error == null ? null : error.getMessage(), + error == null ? null : error.getClass() + ); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index e56be8852e7df..2e231b398af72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RequestValidators; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -56,10 +56,10 @@ /** * Add/remove aliases action */ -public class TransportIndicesAliasesAction extends AcknowledgedTransportMasterNodeAction { +public class TransportIndicesAliasesAction extends TransportMasterNodeAction { public static final String NAME = "indices:admin/aliases"; - public static final ActionType TYPE = new ActionType<>(NAME); + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportIndicesAliasesAction.class); private final MetadataIndexAliasesService indexAliasesService; @@ -85,6 +85,7 @@ public TransportIndicesAliasesAction( actionFilters, IndicesAliasesRequest::new, indexNameExpressionResolver, + IndicesAliasesResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.indexAliasesService = indexAliasesService; @@ -106,15 +107,19 @@ protected void masterOperation( Task task, final IndicesAliasesRequest request, final ClusterState state, - final ActionListener listener + final ActionListener listener ) { // Expand the indices names List actions = request.aliasActions(); List finalActions = new ArrayList<>(); + List actionResults = new ArrayList<>(); // Resolve all the AliasActions into AliasAction instances and gather all the aliases Set aliases = new HashSet<>(); for (AliasActions action : actions) { + int numAliasesRemoved = 0; + List resolvedIndices = new ArrayList<>(); + List concreteDataStreams = indexNameExpressionResolver.dataStreamNames( state, request.indicesOptions(), @@ -161,18 +166,24 @@ protected void masterOperation( finalActions.add(new AddDataStreamAlias(alias, dataStreamName, action.writeIndex(), action.filter())); } } + + 
actionResults.add(AliasActionResult.buildSuccess(concreteDataStreams, action)); continue; } case REMOVE -> { for (String dataStreamName : concreteDataStreams) { for (String alias : concreteDataStreamAliases(action, state.metadata(), dataStreamName)) { finalActions.add(new AliasAction.RemoveDataStreamAlias(alias, dataStreamName, action.mustExist())); + numAliasesRemoved++; } } + if (nonBackingIndices.isEmpty() == false) { // Regular aliases/indices match as well with the provided expression. // (Only when adding new aliases, matching both data streams and indices is disallowed) + resolvedIndices.addAll(concreteDataStreams); } else { + actionResults.add(AliasActionResult.build(concreteDataStreams, action, numAliasesRemoved)); continue; } } @@ -224,6 +235,7 @@ protected void masterOperation( case REMOVE: for (String alias : concreteAliases(action, state.metadata(), index.getName())) { finalActions.add(new AliasAction.Remove(index.getName(), alias, action.mustExist())); + numAliasesRemoved++; } break; case REMOVE_INDEX: @@ -233,14 +245,18 @@ protected void masterOperation( throw new IllegalArgumentException("Unsupported action [" + action.actionType() + "]"); } } + + Arrays.stream(concreteIndices).map(Index::getName).forEach(resolvedIndices::add); + actionResults.add(AliasActionResult.build(resolvedIndices, action, numAliasesRemoved)); } if (finalActions.isEmpty() && false == actions.isEmpty()) { throw new AliasesNotFoundException(aliases.toArray(new String[aliases.size()])); } request.aliasActions().clear(); - IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(unmodifiableList(finalActions)) - .ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()); + IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest( + unmodifiableList(finalActions), + unmodifiableList(actionResults) + ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); indexAliasesService.indicesAliases(updateRequest, listener.delegateResponse((l, e) -> { logger.debug("failed to perform aliases", e); diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index 69b897df4d76d..d38f5b0439f84 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -371,7 +372,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @return The result future */ - ActionFuture aliases(IndicesAliasesRequest request); + ActionFuture aliases(IndicesAliasesRequest request); /** * Allows to add/remove aliases from indices. 
@@ -379,7 +380,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @param listener A listener to be notified with a result */ - void aliases(IndicesAliasesRequest request, ActionListener listener); + void aliases(IndicesAliasesRequest request, ActionListener listener); /** * Allows to add/remove aliases from indices. diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 76073696b0b27..26a8768a78e78 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -118,6 +118,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -1083,12 +1084,12 @@ public ThreadPool threadPool() { } @Override - public ActionFuture aliases(final IndicesAliasesRequest request) { + public ActionFuture aliases(final IndicesAliasesRequest request) { return execute(TransportIndicesAliasesAction.TYPE, request); } @Override - public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { + public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { execute(TransportIndicesAliasesAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java index 63647e53619fe..533ae3a3ad50d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java @@ -8,10 +8,10 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; /** * Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}. 
@@ -189,7 +189,7 @@ boolean removeIndex() { boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) { if (false == index.getAliases().containsKey(alias)) { if (mustExist != null && mustExist) { - throw new ResourceNotFoundException("required alias [" + alias + "] does not exist"); + throw new AliasesNotFoundException(alias); } return false; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java index fb5acbdd2ac49..d9cd1a7725ca8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateAckListener; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -79,7 +79,10 @@ public Tuple executeTask(ApplyAliasesTask this.taskQueue = clusterService.createTaskQueue("index-aliases", Priority.URGENT, this.executor); } - public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener listener) { + public void indicesAliases( + final IndicesAliasesClusterStateUpdateRequest request, + final ActionListener listener + ) { taskQueue.submitTask("index-aliases", new ApplyAliasesTask(request, listener), null); // TODO use request.masterNodeTimeout() here? } @@ -254,7 +257,7 @@ private static void validateAliasTargetIsNotDSBackingIndex(ClusterState currentS /** * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. */ - record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) + record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) implements ClusterStateTaskListener, ClusterStateAckListener { @@ -271,17 +274,17 @@ public boolean mustAck(DiscoveryNode discoveryNode) { @Override public void onAllNodesAcked() { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.build(request.getActionResults())); } @Override public void onAckFailure(Exception e) { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override public void onAckTimeout() { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java new file mode 100644 index 0000000000000..75a1bf8732a4f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.alias.RandomAliasActionsGenerator; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class IndicesAliasesResponseTests extends AbstractWireSerializingTestCase { + public void testMixedModeSerialization() throws IOException { + + // AcknowledgedResponse to IndicesAliasesResponse + // in version before TransportVersions.ALIAS_ACTION_RESULTS + { + var ack = AcknowledgedResponse.of(randomBoolean()); + try (BytesStreamOutput output = new BytesStreamOutput()) { + ack.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(TransportVersions.V_8_12_0); + + var indicesAliasesResponse = new IndicesAliasesResponse(in); + + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + assertTrue(indicesAliasesResponse.getActionResults().isEmpty()); + assertFalse(indicesAliasesResponse.hasErrors()); + } + } + } + + // IndicesAliasesResponse to AcknowledgedResponse + // out version before TransportVersions.ALIAS_ACTION_RESULTS + { + var indicesAliasesResponse = randomIndicesAliasesResponse(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(TransportVersions.V_8_12_0); + + indicesAliasesResponse.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + var ack = AcknowledgedResponse.readFrom(in); + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + } + } + } + } + + @Override + protected Writeable.Reader instanceReader() { + return IndicesAliasesResponse::new; + } + + @Override + protected IndicesAliasesResponse createTestInstance() { + return randomIndicesAliasesResponse(); + } + + private static IndicesAliasesResponse randomIndicesAliasesResponse() { + int numActions = between(0, 5); + List results = new ArrayList<>(); + for (int i = 0; i < numActions; ++i) { + results.add(randomIndicesAliasesResult()); + } + return new IndicesAliasesResponse(randomBoolean(), randomBoolean(), results); + } + + @Override + protected IndicesAliasesResponse mutateInstance(IndicesAliasesResponse instance) throws IOException { + switch (between(0, 2)) { + case 0: { + boolean acknowledged = instance.isAcknowledged() == false; + return new IndicesAliasesResponse(acknowledged, instance.hasErrors(), instance.getActionResults()); + } + case 1: { + boolean errors = instance.hasErrors() == false; + return new IndicesAliasesResponse(instance.isAcknowledged(), errors, instance.getActionResults()); + } + default: { + var results = new ArrayList<>(instance.getActionResults()); + if (results.isEmpty()) { + results.add(randomIndicesAliasesResult()); + } else { + results.remove(between(0, results.size() - 1)); + } + return new IndicesAliasesResponse(instance.isAcknowledged(), instance.hasErrors(), results); + } + } + } + + private static 
IndicesAliasesResponse.AliasActionResult randomIndicesAliasesResult() { + var action = RandomAliasActionsGenerator.randomAliasAction(); + var indices = Arrays.asList(generateRandomStringArray(10, 5, false, false)); + return IndicesAliasesResponse.AliasActionResult.build(indices, action, randomIntBetween(0, 3)); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index 0901b1190cfc0..3f63875bfc216 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -8,8 +8,9 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; @@ -19,6 +20,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -156,11 +158,11 @@ public void testMustExist() { // Show that removing non-existing alias with mustExist == true fails final ClusterState finalCS = after; - final ResourceNotFoundException iae = expectThrows( - ResourceNotFoundException.class, + final AliasesNotFoundException iae = expectThrows( + AliasesNotFoundException.class, () -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))) ); - assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist")); + assertThat(iae.getMessage(), containsString("aliases [test_2] missing")); } public void testMultipleIndices() { @@ -690,10 +692,12 @@ public void testAddAndRemoveAliasClusterStateUpdate() throws Exception { String index = randomAlphaOfLength(5); ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index); IndicesAliasesClusterStateUpdateRequest addAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Add(index, "test", null, null, null, null, null)) + List.of(new AliasAction.Add(index, "test", null, null, null, null, null)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.add().aliases("test").indices(index))) ); IndicesAliasesClusterStateUpdateRequest removeAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Remove(index, "test", true)) + List.of(new AliasAction.Remove(index, "test", true)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.remove().aliases("test").indices(index))) ); ClusterState after = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index 
d3a20235e3a38..07be597c7024e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -130,7 +130,9 @@ public static void createAnnotationsIndexIfNecessary( client.threadPool().getThreadContext(), ML_ORIGIN, requestBuilder.request(), - finalDelegate.delegateFailureAndWrap((l, r) -> checkMappingsListener.onResponse(r.isAcknowledged())), + finalDelegate.delegateFailureAndWrap( + (l, r) -> checkMappingsListener.onResponse(r.isAcknowledged()) + ), client.admin().indices()::aliases ); }); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 016540815fb0a..d4ec7563b868b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -295,7 +296,7 @@ private static void updateWriteAlias( client.threadPool().getThreadContext(), ML_ORIGIN, request, - listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), + listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), client.admin().indices()::aliases ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java index 15e1539570e28..d12cd17d957d4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import 
org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.index.IndexVersion; @@ -90,8 +90,8 @@ public void testPerformAction() throws Exception { IndicesAliasesRequest request = (IndicesAliasesRequest) invocation.getArguments()[0]; assertThat(request.getAliasActions(), equalTo(expectedAliasActions)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(AcknowledgedResponse.TRUE); + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); @@ -113,7 +113,7 @@ public void testPerformActionFailure() { Mockito.doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; listener.onFailure(exception); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index e7dcc6b441a31..f9fdc0c8362e5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -13,11 +13,11 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; @@ -97,8 +97,8 @@ public void setUpMocks() { ); doAnswer(withResponse(new CreateIndexResponse(true, true, FIRST_CONCRETE_INDEX))).when(indicesAdminClient).create(any(), any()); when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client)); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).aliases(any(), any()); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).putTemplate(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).aliases(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).putTemplate(any(), any()); clusterAdminClient = mock(ClusterAdminClient.class); doAnswer(invocationOnMock -> { @@ -116,8 +116,9 @@ public void setUpMocks() { when(client.threadPool()).thenReturn(threadPool); when(client.admin()).thenReturn(adminClient); doAnswer(invocationOnMock -> { - ActionListener actionListener = (ActionListener) 
invocationOnMock.getArguments()[2]; - actionListener.onResponse(AcknowledgedResponse.TRUE); + ActionListener actionListener = (ActionListener) invocationOnMock + .getArguments()[2]; + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(client) .execute( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 61e425d4b05dd..0ccef9acba088 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -223,7 +224,7 @@ private static String getSearchAliasName(SearchApplication app) { public void putSearchApplication(SearchApplication app, boolean create, ActionListener listener) { createOrUpdateAlias(app, new ActionListener<>() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { + public void onResponse(IndicesAliasesResponse response) { updateSearchApplication(app, create, listener); } @@ -240,7 +241,7 @@ public void onFailure(Exception e) { }); } - private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { + private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { final Metadata metadata = clusterService.state().metadata(); final String searchAliasName = getSearchAliasName(app); @@ -332,14 +333,14 @@ private void removeAlias(String searchAliasName, ActionListener() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - listener.onResponse(AcknowledgedResponse.TRUE); + public void onResponse(IndicesAliasesResponse response) { + listener.onResponse(response); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index dab2010035b66..c849e69c780bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import 
org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -173,7 +174,7 @@ private void makeMlInternalIndicesHidden() { String[] mlHiddenIndexPatterns = MachineLearning.getMlHiddenIndexPatterns(); // Step 5: Handle errors encountered on the way. - ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { + ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { if (updateAliasesResponse.isAcknowledged() == false) { logger.warn("One or more of the ML internal aliases could not be made hidden."); return; @@ -194,7 +195,7 @@ private void makeMlInternalIndicesHidden() { } if (indicesAliasesRequest.getAliasActions().isEmpty()) { logger.debug("There are no ML internal aliases that need to be made hidden, [{}]", getAliasesResponse.getAliases()); - finalListener.onResponse(AcknowledgedResponse.TRUE); + finalListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } String indicesWithNonHiddenAliasesString = indicesAliasesRequest.getAliasActions() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index 577bbe3dac6ce..b9cc1902b7ab6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -287,7 +288,7 @@ public void deleteJobDocuments( AtomicReference indexNames = new AtomicReference<>(); - final ActionListener completionHandler = ActionListener.wrap( + final ActionListener completionHandler = ActionListener.wrap( response -> finishedHandler.accept(response.isAcknowledged()), failureHandler ); @@ -295,7 +296,7 @@ public void deleteJobDocuments( // Step 9. 
If we did not drop the indices and after DBQ state done, we delete the aliases ActionListener dbqHandler = ActionListener.wrap(bulkByScrollResponse -> { if (bulkByScrollResponse == null) { // no action was taken by DBQ, assume indices were deleted - completionHandler.onResponse(AcknowledgedResponse.TRUE); + completionHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { if (bulkByScrollResponse.isTimedOut()) { logger.warn("[{}] DeleteByQuery for indices [{}] timed out.", jobId, String.join(", ", indexNames.get())); @@ -469,7 +470,7 @@ private void deleteResultsByQuery( executeAsyncWithOrigin(client, ML_ORIGIN, RefreshAction.INSTANCE, refreshRequest, refreshListener); } - private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { + private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); @@ -486,7 +487,7 @@ private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, Action if (removeRequest == null) { // don't error if the job's aliases have already been deleted - carry on and delete the // rest of the job's data - finishedHandler.onResponse(AcknowledgedResponse.TRUE); + finishedHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } executeAsyncWithOrigin( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 1abb466a20f1a..50342a7bf99e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -344,7 +345,7 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen client.threadPool().getThreadContext(), ML_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), client.admin().indices()::aliases ); }, finalListener::onFailure); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java index e2f66fe914bc2..970403e49c5a3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import 
org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -97,7 +97,7 @@ private static void createAuditAliasForDataFrameBWC(ClusterState state, Client c client.threadPool().getThreadContext(), TRANSFORM_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), client.admin().indices()::aliases ); } From 885256648983b5cef143f6a57f43acb4872229ce Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 13:17:15 -0400 Subject: [PATCH 204/264] Move ESQL's LOCATE test cases to cases (#107271) This moves the test cases declared inline in ESQL's LOCATE function tests into declarative test cases, which will let #106782 properly generate all of the available signatures. It also buys us all of the testing for incorrect parameter combinations. --- .../function/scalar/string/Locate.java | 4 +- .../expression/function/TestCaseSupplier.java | 2 +- .../function/scalar/string/LocateTests.java | 316 +++++++++--------- 3 files changed, 152 insertions(+), 170 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index c8b546718aabf..52d60da3f7341 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -28,8 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. */ @@ -80,7 +80,7 @@ protected TypeResolution resolveType() { return resolution; } - return start == null ? TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + return start == null ?
TypeResolution.TYPE_RESOLVED : isType(start, dt -> dt == DataTypes.INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index c064cfebd9cc5..db26624bc66bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -72,7 +72,7 @@ public TestCaseSupplier(List types, Supplier supplier) { this(nameFromTypes(types), types, supplier); } - static String nameFromTypes(List types) { + public static String nameFromTypes(List types) { return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", ")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index b95f05039630a..a7f4ca0342782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -11,22 +11,21 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.function.Function; import java.util.function.Supplier; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; /** @@ -37,192 +36,175 @@ public LocateTests(@Name("TestCase") Supplier testCas this.testCase = testCaseSupplier.get(); } + private static final DataType[] STRING_TYPES = new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }; + @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add( - supplier( - "keywords", - DataTypes.KEYWORD, - DataTypes.KEYWORD, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed keyword, text", - DataTypes.KEYWORD, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "texts", - DataTypes.TEXT, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed text, keyword", - DataTypes.TEXT, - DataTypes.KEYWORD, - () -> 
randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); - } - - public void testToString() { - assertThat( - evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - field("start", DataTypes.INTEGER) - ) - ).get(driverContext()).toString(), - equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") - ); - } - - @Override - protected Expression build(Source source, List args) { - return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); - } - - public void testPrefixString() { - assertThat(process("a tiger", "a t", 0), equalTo(1)); - assertThat(process("a tiger", "a", 0), equalTo(1)); - assertThat(process("界世", "界", 0), equalTo(1)); - } - - public void testSuffixString() { - assertThat(process("a tiger", "er", 0), equalTo(6)); - assertThat(process("a tiger", "r", 0), equalTo(7)); - assertThat(process("世界", "界", 0), equalTo(2)); - } - - public void testMidString() { - assertThat(process("a tiger", "ti", 0), equalTo(3)); - assertThat(process("a tiger", "ige", 0), equalTo(4)); - assertThat(process("世界世", "界", 0), equalTo(2)); - } - - public void testOutOfRange() { - assertThat(process("a tiger", "tigers", 0), equalTo(0)); - assertThat(process("a tiger", "ipa", 0), equalTo(0)); - assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); - } - - public void testExactString() { - assertThat(process("a tiger", "a tiger", 0), equalTo(1)); - assertThat(process("tigers", "tigers", 0), equalTo(1)); - assertThat(process("界世", "界世", 0), equalTo(1)); - } + for (DataType strType : STRING_TYPES) { + for (DataType substrType : STRING_TYPES) { + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + null, + (str, substr, start) -> 1 + str.indexOf(substr) + ) + ); + suppliers.add( + supplier( + "exact match ", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> str, + null, + (str, substr, start) -> 1 + ) + ); + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + () -> between(0, 3), + (str, substr, start) -> 1 + str.indexOf(substr, start) + ) + ); + } + } - public void testSupplementaryCharacter() { + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers)); + + // Here follows some non-randomized examples that we want to cover on every run + suppliers.add(supplier("a tiger", "a t", null, 1)); + suppliers.add(supplier("a tiger", "a", null, 1)); + suppliers.add(supplier("界世", "界", null, 1)); + suppliers.add(supplier("a tiger", "er", null, 6)); + suppliers.add(supplier("a tiger", "r", null, 7)); + suppliers.add(supplier("界世", "世", null, 2)); + suppliers.add(supplier("a tiger", "ti", null, 3)); + suppliers.add(supplier("a tiger", "ige", null, 4)); + suppliers.add(supplier("世界世", "界", null, 2)); + suppliers.add(supplier("a tiger", "tigers", null, 0)); + suppliers.add(supplier("a tiger", "ipa", null, 0)); + suppliers.add(supplier("世界世", "\uD83C\uDF0D", null, 0)); + + // Extra assertions about 4-byte characters // some assertions about the supplementary (4-byte) character we'll use for testing assert "𠜎".length() == 2; assert 
"𠜎".codePointCount(0, 2) == 1; - assert "𠜎".getBytes(UTF_8).length == 4; - - assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); - assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); - assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); - assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); - assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); - - assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); - assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); - assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); - assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7)); - assertThat(process("𠜎a ti𠜎er", "r", 0), equalTo(8)); - - // exact - assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1)); - + assert "𠜎".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("a ti𠜎er", "𠜎er", null, 5)); + suppliers.add(supplier("a ti𠜎er", "i𠜎e", null, 4)); + suppliers.add(supplier("a ti𠜎er", "ti𠜎", null, 3)); + suppliers.add(supplier("a ti𠜎er", "er", null, 6)); + suppliers.add(supplier("a ti𠜎er", "r", null, 7)); + suppliers.add(supplier("a ti𠜎er", "a ti𠜎er", null, 1)); // prefix - assertThat(process("𠜎abc", "𠜎", 0), equalTo(1)); - assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1)); - + suppliers.add(supplier("𠜎abc", "𠜎", null, 1)); + suppliers.add(supplier("𠜎 abc", "𠜎 ", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", null, 1)); // suffix - assertThat(process("abc𠜎", "𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4)); - + suppliers.add(supplier("abc𠜎", "𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎", " 𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", null, 4)); // out of range - assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0)); - assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0)); - assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0)); + suppliers.add(supplier("𠜎a ti𠜎er", "𠜎a ti𠜎ers", null, 0)); + suppliers.add(supplier("a ti𠜎er", "aa ti𠜎er", null, 0)); + suppliers.add(supplier("abc𠜎𠜎", "𠜎𠜎𠜎", null, 0)); assert "🐱".length() == 2 && "🐶".length() == 2; assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; - assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; - assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1)); - assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2)); - assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3)); + assert "🐱".getBytes(StandardCharsets.UTF_8).length == 4 && "🐶".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", null, 1)); + suppliers.add(supplier("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0, 2)); + suppliers.add(supplier("🐱Meow!🐶Woof!", 
"eow!🐶Woof!", 0, 3)); + + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); } - private Integer process(String str, String substr, Integer start) { - try ( - EvalOperator.ExpressionEvaluator eval = evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - new Literal(Source.EMPTY, start, DataTypes.INTEGER) - ) - ).get(driverContext()); - Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) - ) { - return block.isNull(0) ? Integer.valueOf(0) : ((Integer) toJavaObject(block, 0)); + private static TestCaseSupplier supplier(String str, String substr, @Nullable Integer start, @Nullable Integer expectedValue) { + String name = String.format(Locale.ROOT, "\"%s\" in \"%s\"", substr, str); + if (start != null) { + name += " starting at " + start; } + + return new TestCaseSupplier( + name, + types(DataTypes.KEYWORD, DataTypes.KEYWORD, start != null), + () -> testCase(DataTypes.KEYWORD, DataTypes.KEYWORD, str, substr, start, expectedValue) + ); + } + + interface ExpectedValue { + int expectedValue(String str, String substr, Integer start); } private static TestCaseSupplier supplier( String name, - DataType firstType, - DataType secondType, + DataType strType, + DataType substrType, Supplier strValueSupplier, - Supplier substrValueSupplier, - Supplier startSupplier + Function substrValueSupplier, + @Nullable Supplier startSupplier, + ExpectedValue expectedValue ) { - return new TestCaseSupplier(name, List.of(firstType, secondType), () -> { - List values = new ArrayList<>(); - String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; - - String value = strValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0")); + List types = types(strType, substrType, startSupplier != null); + return new TestCaseSupplier(name + TestCaseSupplier.nameFromTypes(types), types, () -> { + String str = strValueSupplier.get(); + String substr = substrValueSupplier.apply(str); + Integer start = startSupplier == null ? 
null : startSupplier.get(); + return testCase(strType, substrType, str, substr, start, expectedValue.expectedValue(str, substr, start)); + }); + } - String substrValue = substrValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1")); + private static String expectedToString(boolean hasStart) { + if (hasStart) { + return "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; + } + return "LocateNoStartEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1]]"; + } - Integer startValue = startSupplier.get(); - values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2")); + private static List types(DataType firstType, DataType secondType, boolean hasStart) { + List types = new ArrayList<>(); + types.add(firstType); + types.add(secondType); + if (hasStart) { + types.add(DataTypes.INTEGER); + } + return types; + } - int expectedValue = 1 + value.indexOf(substrValue); - return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue)); - }); + private static TestCaseSupplier.TestCase testCase( + DataType strType, + DataType substrType, + String str, + String substr, + Integer start, + Integer expectedValue + ) { + List values = new ArrayList<>(); + values.add(new TestCaseSupplier.TypedData(str == null ? null : new BytesRef(str), strType, "str")); + values.add(new TestCaseSupplier.TypedData(substr == null ? null : new BytesRef(substr), substrType, "substr")); + if (start != null) { + values.add(new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start")); + } + return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataTypes.INTEGER, equalTo(expectedValue)); + } } From 96227a1970c9da0ffbd05dcbea7fa61eb4ce1df9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 14:19:48 -0400 Subject: [PATCH 205/264] ESQL: Generate kibana inline docs (#106782) This takes a stab at generating the markdown files that Kibana uses for its inline help. It doesn't include all of the examples because the `@Example` annotation is not filled in - we're tracking that in https://github.com/elastic/elasticsearch/issues/104247#issuecomment-2018944371. There are some links in the output and they are in markdown syntax. We should figure out how to make them work for Kibana.
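To make the shape of the output concrete, a generated definition file such as docs/reference/esql/functions/kibana/definition/locate.json might plausibly look like the sketch below. This is an illustration, not the generated output itself: every key name here ("type", "signatures", "params", "optional", "variadic", "returnType") is an assumption inferred from the file names in the diffstat that follows, while the LOCATE signature itself (a string, a substring, an optional integer start, returning an integer) comes from the Locate changes in the previous patch.

{
  "type" : "eval",
  "name" : "locate",
  "description" : "Returns an integer that indicates the position of a keyword substring within another string.",
  "signatures" : [
    {
      "params" : [
        { "name" : "string", "type" : "keyword", "optional" : false },
        { "name" : "substring", "type" : "keyword", "optional" : false },
        { "name" : "start", "type" : "integer", "optional" : true }
      ],
      "variadic" : false,
      "returnType" : "integer"
    }
  ]
}

Each definition file would then pair with a short markdown file under kibana/docs/ carrying the description that Kibana renders inline.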
--- docs/reference/esql/functions/README.md | 2 + .../esql/functions/kibana/definition/abs.json | 60 + .../functions/kibana/definition/acos.json | 59 + .../functions/kibana/definition/asin.json | 59 + .../functions/kibana/definition/atan.json | 59 + .../functions/kibana/definition/atan2.json | 299 +++++ .../kibana/definition/auto_bucket.json | 1088 +++++++++++++++++ .../functions/kibana/definition/case.json | 32 + .../functions/kibana/definition/ceil.json | 60 + .../functions/kibana/definition/coalesce.json | 161 +++ .../functions/kibana/definition/concat.json | 44 + .../esql/functions/kibana/definition/cos.json | 59 + .../functions/kibana/definition/cosh.json | 59 + .../kibana/definition/date_diff.json | 56 + .../kibana/definition/date_extract.json | 44 + .../kibana/definition/date_format.json | 44 + .../kibana/definition/date_parse.json | 62 + .../kibana/definition/date_trunc.json | 49 + .../esql/functions/kibana/definition/e.json | 12 + .../kibana/definition/ends_with.json | 44 + .../functions/kibana/definition/floor.json | 56 + .../functions/kibana/definition/greatest.json | 212 ++++ .../functions/kibana/definition/least.json | 212 ++++ .../functions/kibana/definition/left.json | 47 + .../functions/kibana/definition/length.json | 32 + .../functions/kibana/definition/locate.json | 176 +++ .../esql/functions/kibana/definition/log.json | 348 ++++++ .../functions/kibana/definition/log10.json | 59 + .../functions/kibana/definition/ltrim.json | 32 + .../functions/kibana/definition/mv_avg.json | 56 + .../kibana/definition/mv_concat.json | 80 ++ .../functions/kibana/definition/mv_count.json | 176 +++ .../kibana/definition/mv_dedupe.json | 116 ++ .../functions/kibana/definition/mv_first.json | 176 +++ .../functions/kibana/definition/mv_last.json | 176 +++ .../functions/kibana/definition/mv_max.json | 128 ++ .../kibana/definition/mv_median.json | 56 + .../functions/kibana/definition/mv_min.json | 128 ++ .../functions/kibana/definition/mv_slice.json | 320 +++++ .../functions/kibana/definition/mv_sort.json | 170 +++ .../functions/kibana/definition/mv_sum.json | 56 + .../functions/kibana/definition/mv_zip.json | 56 + .../esql/functions/kibana/definition/pi.json | 12 + .../esql/functions/kibana/definition/pow.json | 296 +++++ .../functions/kibana/definition/replace.json | 200 +++ .../functions/kibana/definition/right.json | 44 + .../functions/kibana/definition/round.json | 26 + .../functions/kibana/definition/rtrim.json | 32 + .../functions/kibana/definition/signum.json | 59 + .../esql/functions/kibana/definition/sin.json | 59 + .../functions/kibana/definition/sinh.json | 59 + .../functions/kibana/definition/split.json | 44 + .../functions/kibana/definition/sqrt.json | 56 + .../kibana/definition/st_contains.json | 155 +++ .../kibana/definition/st_disjoint.json | 155 +++ .../kibana/definition/st_intersects.json | 155 +++ .../kibana/definition/st_within.json | 155 +++ .../functions/kibana/definition/st_x.json | 32 + .../functions/kibana/definition/st_y.json | 32 + .../kibana/definition/starts_with.json | 44 + .../kibana/definition/substring.json | 56 + .../esql/functions/kibana/definition/tan.json | 59 + .../functions/kibana/definition/tanh.json | 59 + .../esql/functions/kibana/definition/tau.json | 12 + .../kibana/definition/to_boolean.json | 92 ++ .../kibana/definition/to_cartesianpoint.json | 44 + .../kibana/definition/to_cartesianshape.json | 56 + .../kibana/definition/to_datetime.json | 92 ++ .../kibana/definition/to_degrees.json | 56 + .../kibana/definition/to_double.json | 104 ++ 
.../kibana/definition/to_geopoint.json | 44 + .../kibana/definition/to_geoshape.json | 56 + .../kibana/definition/to_integer.json | 104 ++ .../functions/kibana/definition/to_ip.json | 44 + .../functions/kibana/definition/to_long.json | 104 ++ .../functions/kibana/definition/to_lower.json | 32 + .../kibana/definition/to_radians.json | 56 + .../kibana/definition/to_string.json | 176 +++ .../kibana/definition/to_unsigned_long.json | 104 ++ .../functions/kibana/definition/to_upper.json | 32 + .../kibana/definition/to_version.json | 44 + .../functions/kibana/definition/trim.json | 32 + .../esql/functions/kibana/docs/abs.md | 11 + .../esql/functions/kibana/docs/acos.md | 11 + .../esql/functions/kibana/docs/asin.md | 12 + .../esql/functions/kibana/docs/atan.md | 12 + .../esql/functions/kibana/docs/atan2.md | 12 + .../esql/functions/kibana/docs/auto_bucket.md | 8 + .../esql/functions/kibana/docs/case.md | 8 + .../esql/functions/kibana/docs/ceil.md | 12 + .../esql/functions/kibana/docs/coalesce.md | 11 + .../esql/functions/kibana/docs/concat.md | 7 + .../esql/functions/kibana/docs/cos.md | 11 + .../esql/functions/kibana/docs/cosh.md | 11 + .../esql/functions/kibana/docs/date_diff.md | 7 + .../functions/kibana/docs/date_extract.md | 7 + .../esql/functions/kibana/docs/date_format.md | 7 + .../esql/functions/kibana/docs/date_parse.md | 7 + .../esql/functions/kibana/docs/date_trunc.md | 12 + .../reference/esql/functions/kibana/docs/e.md | 7 + .../esql/functions/kibana/docs/ends_with.md | 7 + .../esql/functions/kibana/docs/floor.md | 7 + .../esql/functions/kibana/docs/greatest.md | 7 + .../esql/functions/kibana/docs/least.md | 7 + .../esql/functions/kibana/docs/left.md | 14 + .../esql/functions/kibana/docs/length.md | 7 + .../esql/functions/kibana/docs/locate.md | 7 + .../esql/functions/kibana/docs/log.md | 13 + .../esql/functions/kibana/docs/log10.md | 13 + .../esql/functions/kibana/docs/ltrim.md | 7 + .../esql/functions/kibana/docs/mv_avg.md | 7 + .../esql/functions/kibana/docs/mv_concat.md | 7 + .../esql/functions/kibana/docs/mv_count.md | 7 + .../esql/functions/kibana/docs/mv_dedupe.md | 7 + .../esql/functions/kibana/docs/mv_first.md | 7 + .../esql/functions/kibana/docs/mv_last.md | 7 + .../esql/functions/kibana/docs/mv_max.md | 7 + .../esql/functions/kibana/docs/mv_median.md | 7 + .../esql/functions/kibana/docs/mv_min.md | 7 + .../esql/functions/kibana/docs/mv_slice.md | 7 + .../esql/functions/kibana/docs/mv_sort.md | 7 + .../esql/functions/kibana/docs/mv_sum.md | 7 + .../esql/functions/kibana/docs/mv_zip.md | 7 + .../esql/functions/kibana/docs/pi.md | 7 + .../esql/functions/kibana/docs/pow.md | 7 + .../esql/functions/kibana/docs/replace.md | 7 + .../esql/functions/kibana/docs/right.md | 7 + .../esql/functions/kibana/docs/round.md | 7 + .../esql/functions/kibana/docs/rtrim.md | 7 + .../esql/functions/kibana/docs/signum.md | 12 + .../esql/functions/kibana/docs/sin.md | 11 + .../esql/functions/kibana/docs/sinh.md | 11 + .../esql/functions/kibana/docs/split.md | 7 + .../esql/functions/kibana/docs/sqrt.md | 7 + .../esql/functions/kibana/docs/st_contains.md | 12 + .../esql/functions/kibana/docs/st_disjoint.md | 12 + .../functions/kibana/docs/st_intersects.md | 11 + .../esql/functions/kibana/docs/st_within.md | 12 + .../esql/functions/kibana/docs/st_x.md | 7 + .../esql/functions/kibana/docs/st_y.md | 7 + .../esql/functions/kibana/docs/starts_with.md | 7 + .../esql/functions/kibana/docs/substring.md | 7 + .../esql/functions/kibana/docs/tan.md | 11 + .../esql/functions/kibana/docs/tanh.md | 11 + 
.../esql/functions/kibana/docs/tau.md | 7 + .../esql/functions/kibana/docs/to_boolean.md | 7 + .../kibana/docs/to_cartesianpoint.md | 7 + .../kibana/docs/to_cartesianshape.md | 7 + .../esql/functions/kibana/docs/to_datetime.md | 7 + .../esql/functions/kibana/docs/to_degrees.md | 7 + .../esql/functions/kibana/docs/to_double.md | 7 + .../esql/functions/kibana/docs/to_geopoint.md | 7 + .../esql/functions/kibana/docs/to_geoshape.md | 7 + .../esql/functions/kibana/docs/to_integer.md | 7 + .../esql/functions/kibana/docs/to_ip.md | 7 + .../esql/functions/kibana/docs/to_long.md | 7 + .../esql/functions/kibana/docs/to_lower.md | 7 + .../esql/functions/kibana/docs/to_radians.md | 7 + .../esql/functions/kibana/docs/to_string.md | 7 + .../functions/kibana/docs/to_unsigned_long.md | 7 + .../esql/functions/kibana/docs/to_upper.md | 7 + .../esql/functions/kibana/docs/to_version.md | 7 + .../esql/functions/kibana/docs/trim.md | 7 + .../esql/functions/signature/case.svg | 1 + x-pack/plugin/esql/build.gradle | 33 +- .../function/EsqlFunctionRegistry.java | 17 +- .../function/AbstractFunctionTestCase.java | 248 +++- 167 files changed, 9196 insertions(+), 69 deletions(-) create mode 100644 docs/reference/esql/functions/kibana/definition/abs.json create mode 100644 docs/reference/esql/functions/kibana/definition/acos.json create mode 100644 docs/reference/esql/functions/kibana/definition/asin.json create mode 100644 docs/reference/esql/functions/kibana/definition/atan.json create mode 100644 docs/reference/esql/functions/kibana/definition/atan2.json create mode 100644 docs/reference/esql/functions/kibana/definition/auto_bucket.json create mode 100644 docs/reference/esql/functions/kibana/definition/case.json create mode 100644 docs/reference/esql/functions/kibana/definition/ceil.json create mode 100644 docs/reference/esql/functions/kibana/definition/coalesce.json create mode 100644 docs/reference/esql/functions/kibana/definition/concat.json create mode 100644 docs/reference/esql/functions/kibana/definition/cos.json create mode 100644 docs/reference/esql/functions/kibana/definition/cosh.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_diff.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_extract.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_format.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_parse.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_trunc.json create mode 100644 docs/reference/esql/functions/kibana/definition/e.json create mode 100644 docs/reference/esql/functions/kibana/definition/ends_with.json create mode 100644 docs/reference/esql/functions/kibana/definition/floor.json create mode 100644 docs/reference/esql/functions/kibana/definition/greatest.json create mode 100644 docs/reference/esql/functions/kibana/definition/least.json create mode 100644 docs/reference/esql/functions/kibana/definition/left.json create mode 100644 docs/reference/esql/functions/kibana/definition/length.json create mode 100644 docs/reference/esql/functions/kibana/definition/locate.json create mode 100644 docs/reference/esql/functions/kibana/definition/log.json create mode 100644 docs/reference/esql/functions/kibana/definition/log10.json create mode 100644 docs/reference/esql/functions/kibana/definition/ltrim.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_avg.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_concat.json create 
mode 100644 docs/reference/esql/functions/kibana/definition/mv_count.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_dedupe.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_first.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_last.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_max.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_median.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_min.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_slice.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_sort.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_sum.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_zip.json create mode 100644 docs/reference/esql/functions/kibana/definition/pi.json create mode 100644 docs/reference/esql/functions/kibana/definition/pow.json create mode 100644 docs/reference/esql/functions/kibana/definition/replace.json create mode 100644 docs/reference/esql/functions/kibana/definition/right.json create mode 100644 docs/reference/esql/functions/kibana/definition/round.json create mode 100644 docs/reference/esql/functions/kibana/definition/rtrim.json create mode 100644 docs/reference/esql/functions/kibana/definition/signum.json create mode 100644 docs/reference/esql/functions/kibana/definition/sin.json create mode 100644 docs/reference/esql/functions/kibana/definition/sinh.json create mode 100644 docs/reference/esql/functions/kibana/definition/split.json create mode 100644 docs/reference/esql/functions/kibana/definition/sqrt.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_contains.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_disjoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_intersects.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_within.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_x.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_y.json create mode 100644 docs/reference/esql/functions/kibana/definition/starts_with.json create mode 100644 docs/reference/esql/functions/kibana/definition/substring.json create mode 100644 docs/reference/esql/functions/kibana/definition/tan.json create mode 100644 docs/reference/esql/functions/kibana/definition/tanh.json create mode 100644 docs/reference/esql/functions/kibana/definition/tau.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_boolean.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_cartesianshape.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_datetime.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_degrees.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_double.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_geopoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_geoshape.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_integer.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_ip.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_long.json create mode 100644 
docs/reference/esql/functions/kibana/definition/to_lower.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_radians.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_string.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_unsigned_long.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_upper.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_version.json create mode 100644 docs/reference/esql/functions/kibana/definition/trim.json create mode 100644 docs/reference/esql/functions/kibana/docs/abs.md create mode 100644 docs/reference/esql/functions/kibana/docs/acos.md create mode 100644 docs/reference/esql/functions/kibana/docs/asin.md create mode 100644 docs/reference/esql/functions/kibana/docs/atan.md create mode 100644 docs/reference/esql/functions/kibana/docs/atan2.md create mode 100644 docs/reference/esql/functions/kibana/docs/auto_bucket.md create mode 100644 docs/reference/esql/functions/kibana/docs/case.md create mode 100644 docs/reference/esql/functions/kibana/docs/ceil.md create mode 100644 docs/reference/esql/functions/kibana/docs/coalesce.md create mode 100644 docs/reference/esql/functions/kibana/docs/concat.md create mode 100644 docs/reference/esql/functions/kibana/docs/cos.md create mode 100644 docs/reference/esql/functions/kibana/docs/cosh.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_diff.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_extract.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_format.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_parse.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_trunc.md create mode 100644 docs/reference/esql/functions/kibana/docs/e.md create mode 100644 docs/reference/esql/functions/kibana/docs/ends_with.md create mode 100644 docs/reference/esql/functions/kibana/docs/floor.md create mode 100644 docs/reference/esql/functions/kibana/docs/greatest.md create mode 100644 docs/reference/esql/functions/kibana/docs/least.md create mode 100644 docs/reference/esql/functions/kibana/docs/left.md create mode 100644 docs/reference/esql/functions/kibana/docs/length.md create mode 100644 docs/reference/esql/functions/kibana/docs/locate.md create mode 100644 docs/reference/esql/functions/kibana/docs/log.md create mode 100644 docs/reference/esql/functions/kibana/docs/log10.md create mode 100644 docs/reference/esql/functions/kibana/docs/ltrim.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_avg.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_concat.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_count.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_dedupe.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_first.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_last.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_max.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_median.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_min.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_slice.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_sort.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_sum.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_zip.md create mode 100644 docs/reference/esql/functions/kibana/docs/pi.md 
create mode 100644 docs/reference/esql/functions/kibana/docs/pow.md create mode 100644 docs/reference/esql/functions/kibana/docs/replace.md create mode 100644 docs/reference/esql/functions/kibana/docs/right.md create mode 100644 docs/reference/esql/functions/kibana/docs/round.md create mode 100644 docs/reference/esql/functions/kibana/docs/rtrim.md create mode 100644 docs/reference/esql/functions/kibana/docs/signum.md create mode 100644 docs/reference/esql/functions/kibana/docs/sin.md create mode 100644 docs/reference/esql/functions/kibana/docs/sinh.md create mode 100644 docs/reference/esql/functions/kibana/docs/split.md create mode 100644 docs/reference/esql/functions/kibana/docs/sqrt.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_contains.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_disjoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_intersects.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_within.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_x.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_y.md create mode 100644 docs/reference/esql/functions/kibana/docs/starts_with.md create mode 100644 docs/reference/esql/functions/kibana/docs/substring.md create mode 100644 docs/reference/esql/functions/kibana/docs/tan.md create mode 100644 docs/reference/esql/functions/kibana/docs/tanh.md create mode 100644 docs/reference/esql/functions/kibana/docs/tau.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_boolean.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_cartesianshape.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_datetime.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_degrees.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_double.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_geopoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_geoshape.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_integer.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_ip.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_long.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_lower.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_radians.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_string.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_unsigned_long.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_upper.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_version.md create mode 100644 docs/reference/esql/functions/kibana/docs/trim.md create mode 100644 docs/reference/esql/functions/signature/case.svg diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index 7be4c70fbe6b0..35b852ba060f1 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -5,6 +5,8 @@ The files in these subdirectories are generated by ESQL's test suite: * `signature` - railroad diagram of the syntax to invoke each function * `types` - a table of each combination of support type for each parameter. These are generated from tests. 
* `layout` - a fully generated description for each function +* `kibana/definition` - function definitions for kibana's ESQL editor +* `kibana/docs` - the inline docs for kibana Most functions can use the generated docs generated in the `layout` directory. If we need something more custom for the function we can make a file in this diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json new file mode 100644 index 0000000000000..82c3c205d7512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "abs", + "description" : "Returns the absolute value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW number = -1.0 \n| EVAL abs_number = ABS(number)", + "FROM employees\n| KEEP first_name, last_name, height\n| EVAL abs_height = ABS(0.0 - height)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json new file mode 100644 index 0000000000000..6a6ab59278639 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "acos", + "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." 
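The `abs` definition above stores each example as a single JSON string with `\n` escapes. Rendered as the ESQL it encodes (assuming the consumer expands the escapes into line breaks), the first example reads:

    ROW number = -1.0
    | EVAL abs_number = ABS(number)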
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL acos=ACOS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json new file mode 100644 index 0000000000000..f5ebb817fff33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "asin", + "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL asin=ASIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json new file mode 100644 index 0000000000000..654a48b8ca76d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan", + "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
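Expanded the same way, the `acos` and `asin` examples above are one-row probes of each function:

    ROW a=.9
    | EVAL acos=ACOS(a)

    ROW a=.9
    | EVAL asin=ASIN(a)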
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=12.9\n| EVAL atan=ATAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json new file mode 100644 index 0000000000000..63940831241f7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -0,0 +1,299 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan2", + "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
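The `atan` example above, with its `\n` escape expanded:

    ROW a=12.9
    | EVAL atan=ATAN(a)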
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW y=12.9, x=.6\n| EVAL atan2=ATAN2(y, x)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/auto_bucket.json b/docs/reference/esql/functions/kibana/definition/auto_bucket.json new file mode 100644 index 0000000000000..96940e5f051f2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/auto_bucket.json @@ -0,0 +1,1088 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "auto_bucket", + "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", 
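The `atan2` example above, expanded:

    ROW y=12.9, x=.6
    | EVAL atan2=ATAN2(y, x)

The `auto_bucket` definition that begins here ships no `examples` array. A hypothetical invocation consistent with its (datetime, integer, keyword, keyword) signature, using an illustrative index and field name that are not part of this patch:

    FROM employees
    | EVAL week = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")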
+ "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + 
"params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : 
"double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + 
"returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json new file mode 100644 index 0000000000000..73bc215ac6ade --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "case", + "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "signatures" : [ + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "trueValue", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "falseValue", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json new file mode 100644 index 0000000000000..b8ac9ad55f31a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ceil", + "description" : "Round a number up to the nearest integer.", + "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW a=1.8\n| EVAL a=CEIL(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json new file mode 100644 index 0000000000000..87feead06d091 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -0,0 +1,161 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "coalesce", + "description" : "Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + } + ], + "examples" : [ + "ROW a=null, b=\"b\"\n| EVAL COALESCE(a, b)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json new file mode 100644 index 0000000000000..bb1b84f67aff9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "concat", + "description" : "Concatenates two or more strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json new file mode 100644 index 0000000000000..c7757fbd4071d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cos", + "description" : "Returns the cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cos=COS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json new file mode 100644 index 0000000000000..a34eee15be37e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cosh", + "description" : "Returns the hyperbolic cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
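The `cos` example above, expanded:

    ROW a=1.8
    | EVAL cos=COS(a)

`concat`, defined earlier in this hunk, carries no `examples` array; a hypothetical call consistent with its variadic keyword signature (column names are illustrative only):

    ROW first = "Jane", last = "Doe"
    | EVAL full_name = CONCAT(first, " ", last)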
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cosh=COSH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json new file mode 100644 index 0000000000000..aa030ea163709 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_diff", + "description" : "Subtracts two dates and returns their difference in multiples of a unit specified in the first argument", + "signatures" : [ + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json new file mode 100644 index 0000000000000..c5edf5ac14109 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_extract", + "description" : "Extracts parts of a date, like year, month, day, hour.", + "signatures" : [ + { + "params" : [ + { + "name" : "datePart", + "type" : "keyword", + "optional" : false, + "description" : "Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "datePart", + "type" : "text", + "optional" : false, + "description" : "Part of the date to extract.
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json new file mode 100644 index 0000000000000..8807e5d330f84 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_format", + "description" : "Returns a string representation of a date, in the provided format.", + "signatures" : [ + { + "params" : [ + { + "name" : "dateFormat", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json new file mode 100644 index 0000000000000..85bce19532020 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -0,0 +1,62 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_parse", + "description" : "Parses a string into a date value", + "signatures" : [ + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "keyword", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json new file mode 100644 index 0000000000000..3d8658c496529 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_trunc", + "description" : "Rounds down a date to the closest interval.", + "signatures" : [ + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ], + "examples" : [ + "FROM employees\n| KEEP first_name, last_name, hire_date\n| EVAL year_hired = DATE_TRUNC(1 year, hire_date)", + "FROM employees\n| EVAL year = DATE_TRUNC(1 year, hire_date)\n| STATS hires = COUNT(emp_no) BY year\n| SORT year", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json new file mode 100644 index 0000000000000..97d33b752d042 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "e", + "description" : "Euler’s number.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json new file mode 100644 index 0000000000000..66f4c7404905c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ends_with", + "description" : "Returns a boolean that indicates whether a keyword string ends with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json new file mode 100644 index 0000000000000..18ab8031558bd --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "floor", + "description" : "Round a number down to the nearest integer.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json new file mode 100644 index 0000000000000..f72f54708c6b1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "greatest", + "description" : "Returns the maximum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json new file mode 100644 index 0000000000000..66efedc0c9fe5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "least", + "description" : "Returns the minimum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json new file mode 100644 index 0000000000000..bcda92b887bb0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "left", + "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "FROM employees\n| KEEP last_name\n| EVAL left = LEFT(last_name, 3)\n| SORT last_name ASC\n| LIMIT 5" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json new file mode 100644 index 0000000000000..a42656b71d471 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "length", + "description" : "Returns the character length of a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json new file mode 100644 index 0000000000000..9629b81820f8a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "locate", + "description" : "Returns an integer that indicates the position of a keyword substring within another string", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json new file mode 100644 index 0000000000000..0edafefc4dd1a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -0,0 +1,348 @@ +{ + "comment" : "This is generated 
by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log", + "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
+ }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW base = 2.0, value = 8.0\n| EVAL s = LOG(base, value)", + "row value = 100\n| EVAL s = LOG(value);" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json new file mode 100644 index 0000000000000..ca506b0df33e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log10", + "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 1000.0 \n| EVAL s = LOG10(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json new file mode 100644 index 0000000000000..bcf51f6b9e9fb --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ltrim", + "description" : "Removes leading whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json new file mode 100644 index 0000000000000..2fa14f0c91d51 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_avg", + "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json new file mode 100644 index 0000000000000..1f6936857bcff --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json @@ -0,0 +1,80 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_concat", + "description" : "Reduce a multivalued string field to a single valued field by concatenating all values.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json new file mode 100644 index 0000000000000..d27821451899b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_count", + "description" : "Reduce a multivalued field to a single valued field containing the count of values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : 
"field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json new file mode 100644 index 0000000000000..c0f02d9febc42 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -0,0 +1,116 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_dedupe", + "description" : "Remove duplicate values from a multivalued field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json new file mode 100644 index 0000000000000..d73b3ae002be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_first", + "description" : "Reduce a multivalued field to a single valued field containing the first value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json new file mode 100644 index 0000000000000..0484bfa0b488b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_last", + "description" : "Reduce a multivalued field to a single valued field containing the last value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json new file mode 100644 index 0000000000000..62a6e15f3346a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_max", + "description" : "Reduce a multivalued field to a single valued field containing the maximum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json b/docs/reference/esql/functions/kibana/definition/mv_median.json new file mode 100644 index 0000000000000..a6d79f7e6f0a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_median.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_median", + "description" : "Converts a multivalued field into a single valued field containing the median value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json new file mode 100644 index 0000000000000..8a6f485aedc57 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_min", + "description" : "Reduce a multivalued field to a single valued field containing the minimum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json new file mode 100644 index 0000000000000..6d3aa873d8d01 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -0,0 +1,320 @@ +{ + "comment" : "This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_slice", + "description" : "Returns a subset of the multivalued field using the start and end index values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ 
+ { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json new file mode 100644 index 0000000000000..f647d51a2cfaf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -0,0 +1,170 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sort", + "description" : "Sorts a multivalued field in lexicographical order.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json new file mode 100644 index 0000000000000..25f687efed675 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sum", + "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json new file mode 100644 index 0000000000000..7fabc0e56f12d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_zip", + "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json new file mode 100644 index 0000000000000..d1d700d2011ee --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pi.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pi", + "description" : "The ratio of a circle’s circumference to its diameter.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json new file mode 100644 index 0000000000000..9970a45847cc7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pow.json @@ -0,0 +1,296 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pow", + "description" : "Returns the value of a base raised to the power of an exponent.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : 
false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json new file mode 100644 index 0000000000000..cf54b296555a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/replace.json @@ -0,0 +1,200 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "replace", + "description" : "The function substitutes in the string any match of the regular expression with the replacement string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : 
"newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json new file mode 100644 index 0000000000000..58d081c3782bf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/right.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "right", + "description" : "Return the substring that extracts length chars from the string starting from the right.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json new file mode 100644 index 0000000000000..e12672d8ee6e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -0,0 +1,26 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "round", + "description" : "Rounds a number to the closest number with the specified number of digits.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round" + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0." + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json new file mode 100644 index 0000000000000..586d53a3f84da --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/rtrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "rtrim", + "description" : "Removes trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json new file mode 100644 index 0000000000000..b8343283f457e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/signum.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "signum", + "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 100.0\n| EVAL s = SIGNUM(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json new file mode 100644 index 0000000000000..8d092bd0c15a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sin", + "description" : "Returns the Sine trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`."
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sin=SIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json new file mode 100644 index 0000000000000..2261b18134f6c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sinh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sinh", + "description" : "Returns the hyperbolic sine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sinh=SINH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json new file mode 100644 index 0000000000000..b64def1b813fc --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/split.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "split", + "description" : "Split a single valued string into multiple strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json new file mode 100644 index 0000000000000..6036fcfd113f3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sqrt.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sqrt", + "description" : "Returns the square root of a number.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json new file mode 100644 index 0000000000000..f4f8003917908 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_contains", + "description" : "Returns whether the first geometry contains the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, 
+ "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE(\"POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json new file mode 100644 index 0000000000000..98647b63ff18f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_disjoint", + "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + 
"returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE(\"POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json new file mode 100644 index 0000000000000..ba619fe57ecf5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_intersects", + "description" : "Returns whether the two geometries or geometry columns intersect.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + 
"description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airports\n| WHERE ST_INTERSECTS(location, TO_GEOSHAPE(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\"))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json new file mode 100644 index 0000000000000..ee98337441ab7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_within", + "description" : "Returns whether the first geometry is within the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE(\"POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json new 
file mode 100644 index 0000000000000..57598b3470e11 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_x", + "description" : "Extracts the x-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json new file mode 100644 index 0000000000000..0dacaa56bb8de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_y", + "description" : "Extracts the y-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json new file mode 100644 index 0000000000000..918940d110651 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/starts_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "starts_with", + "description" : "Returns a boolean that indicates whether a keyword string starts with another string.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json new file mode 100644 index 0000000000000..89c62258f4516 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/substring.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it.
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "substring", + "description" : "Returns a substring of a string, specified by a start position and an optional length.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json new file mode 100644 index 0000000000000..7498964dc1a2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tan", + "description" : "Returns the Tangent trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`."
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tan=TAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json new file mode 100644 index 0000000000000..507f62d394be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tanh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tanh", + "description" : "Returns the hyperbolic tangent of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tanh=TANH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json new file mode 100644 index 0000000000000..6ad20f86be4de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tau.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tau", + "description" : "The ratio of a circle’s circumference to its radius.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json new file mode 100644 index 0000000000000..314df3f7a4ca9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_boolean", + "description" : "Converts an input value to a boolean value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json new file mode 100644 index 0000000000000..59b0c0b38f850 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it.
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianpoint", + "description" : "Converts an input value to a point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json new file mode 100644 index 0000000000000..75c1f05bd7738 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianshape", + "description" : "Converts an input value to a shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json new file mode 100644 index 0000000000000..e2b10e54f4a29 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_datetime", + "description" : "Converts an input value to a date value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json new file mode 100644 index 0000000000000..7652254fcebe1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_degrees", + "description" : "Converts a number in radians to degrees.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json new file mode 100644 index 0000000000000..7fad85d7be129 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_double.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_double", + "description" : "Converts an input value to a double value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json new file mode 100644 index 0000000000000..b8a7ca9b9a19f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geopoint", + "description" : "Converts an input value to a geo_point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json new file mode 100644 index 0000000000000..d3dee5812510c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geoshape", + "description" : "Converts an input value to a geo_shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json new file mode 100644 index 0000000000000..3e8a7897bda7b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_integer.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_integer", + "description" : "Converts an input value to an integer value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json new file mode 100644 index 0000000000000..f99ef65752559 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_ip", + "description" : "Converts an input string to an IP value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json new file mode 100644 index 0000000000000..56fd5dc83e721 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_long", + "description" : "Converts an input value to a long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json new file mode 100644 index 0000000000000..4b3121da437ed --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_lower", + "description" : "Returns a new string representing the input string converted to lower case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json new file mode 100644 index 0000000000000..8b8fc287318ab --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_radians", + "description" : "Converts a number in degrees to radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json new file mode 100644 index 0000000000000..bb77c68bf59e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_string", + "description" : "Converts a field into a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json new file mode 100644 index 0000000000000..923294c19ffba --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_unsigned_long", + "description" : "Converts an input value to an unsigned long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json new file mode 100644 index 0000000000000..d5ecb1f47206f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_upper", + "description" : "Returns a new string representing the input string converted to upper case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json new file mode 100644 index 0000000000000..6076f8dfd70c0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_version", + "description" : "Converts an input string to a version value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json new file mode 100644 index 0000000000000..8e194df0eb84d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "trim", + "description" : "Removes leading and trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/abs.md b/docs/reference/esql/functions/kibana/docs/abs.md new file mode 100644 index 0000000000000..9dc2c5c76f4f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/abs.md @@ -0,0 +1,11 @@ + + +### ABS +Returns the absolute value. + +``` +ROW number = -1.0 +| EVAL abs_number = ABS(number) +``` diff --git a/docs/reference/esql/functions/kibana/docs/acos.md b/docs/reference/esql/functions/kibana/docs/acos.md new file mode 100644 index 0000000000000..19ae2522d48b4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/acos.md @@ -0,0 +1,11 @@ + + +### ACOS +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL acos=ACOS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/asin.md b/docs/reference/esql/functions/kibana/docs/asin.md new file mode 100644 index 0000000000000..c072ac19b5b92 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/asin.md @@ -0,0 +1,12 @@ + + +### ASIN +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +numeric expression as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL asin=ASIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan.md b/docs/reference/esql/functions/kibana/docs/atan.md new file mode 100644 index 0000000000000..62686f2fbab2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan.md @@ -0,0 +1,12 @@ + + +### ATAN +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +numeric expression as an angle, expressed in radians. 
+ +``` +ROW a=12.9 +| EVAL atan=ATAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan2.md b/docs/reference/esql/functions/kibana/docs/atan2.md new file mode 100644 index 0000000000000..0000c532236d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan2.md @@ -0,0 +1,12 @@ + + +### ATAN2 +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +origin to the point (x , y) in the Cartesian plane, expressed in radians. + +``` +ROW y=12.9, x=.6 +| EVAL atan2=ATAN2(y, x) +``` diff --git a/docs/reference/esql/functions/kibana/docs/auto_bucket.md b/docs/reference/esql/functions/kibana/docs/auto_bucket.md new file mode 100644 index 0000000000000..df3999f968486 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/auto_bucket.md @@ -0,0 +1,8 @@ + + +### AUTO_BUCKET +Creates human-friendly buckets and returns a datetime value +for each row that corresponds to the resulting bucket the row falls into. + diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md new file mode 100644 index 0000000000000..e1494a5c2af8c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -0,0 +1,8 @@ + + +### CASE +Accepts pairs of conditions and values. +The function returns the value that belongs to the first condition that evaluates to true. + diff --git a/docs/reference/esql/functions/kibana/docs/ceil.md b/docs/reference/esql/functions/kibana/docs/ceil.md new file mode 100644 index 0000000000000..812b139206c35 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ceil.md @@ -0,0 +1,12 @@ + + +### CEIL +Round a number up to the nearest integer. + +``` +ROW a=1.8 +| EVAL a=CEIL(a) +``` +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/kibana/docs/coalesce.md b/docs/reference/esql/functions/kibana/docs/coalesce.md new file mode 100644 index 0000000000000..89cca3f3a286a --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/coalesce.md @@ -0,0 +1,11 @@ + + +### COALESCE +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. + +``` +ROW a=null, b="b" +| EVAL COALESCE(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/concat.md b/docs/reference/esql/functions/kibana/docs/concat.md new file mode 100644 index 0000000000000..9c30d978370dc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/concat.md @@ -0,0 +1,7 @@ + + +### CONCAT +Concatenates two or more strings. + diff --git a/docs/reference/esql/functions/kibana/docs/cos.md b/docs/reference/esql/functions/kibana/docs/cos.md new file mode 100644 index 0000000000000..9e8abebaddb89 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cos.md @@ -0,0 +1,11 @@ + + +### COS +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. + +``` +ROW a=1.8 +| EVAL cos=COS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/cosh.md b/docs/reference/esql/functions/kibana/docs/cosh.md new file mode 100644 index 0000000000000..b8fae70ae2eed --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cosh.md @@ -0,0 +1,11 @@ + + +### COSH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. 
+ +``` +ROW a=1.8 +| EVAL cosh=COSH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/date_diff.md b/docs/reference/esql/functions/kibana/docs/date_diff.md new file mode 100644 index 0000000000000..8d33e21d2f92c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_diff.md @@ -0,0 +1,7 @@ + + +### DATE_DIFF +Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument + diff --git a/docs/reference/esql/functions/kibana/docs/date_extract.md b/docs/reference/esql/functions/kibana/docs/date_extract.md new file mode 100644 index 0000000000000..49eb2391c188e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_extract.md @@ -0,0 +1,7 @@ + + +### DATE_EXTRACT +Extracts parts of a date, like year, month, day, hour. + diff --git a/docs/reference/esql/functions/kibana/docs/date_format.md b/docs/reference/esql/functions/kibana/docs/date_format.md new file mode 100644 index 0000000000000..fbf7fcbf0cb48 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_format.md @@ -0,0 +1,7 @@ + + +### DATE_FORMAT +Returns a string representation of a date, in the provided format. + diff --git a/docs/reference/esql/functions/kibana/docs/date_parse.md b/docs/reference/esql/functions/kibana/docs/date_parse.md new file mode 100644 index 0000000000000..8cf0769c38f3b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_parse.md @@ -0,0 +1,7 @@ + + +### DATE_PARSE +Parses a string into a date value + diff --git a/docs/reference/esql/functions/kibana/docs/date_trunc.md b/docs/reference/esql/functions/kibana/docs/date_trunc.md new file mode 100644 index 0000000000000..6aa81ebbac3c3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_trunc.md @@ -0,0 +1,12 @@ + + +### DATE_TRUNC +Rounds down a date to the closest interval. + +``` +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL year_hired = DATE_TRUNC(1 year, hire_date) +``` diff --git a/docs/reference/esql/functions/kibana/docs/e.md b/docs/reference/esql/functions/kibana/docs/e.md new file mode 100644 index 0000000000000..da85eadf2e74e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/e.md @@ -0,0 +1,7 @@ + + +### E +Euler’s number. + diff --git a/docs/reference/esql/functions/kibana/docs/ends_with.md b/docs/reference/esql/functions/kibana/docs/ends_with.md new file mode 100644 index 0000000000000..74f02c732edef --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ends_with.md @@ -0,0 +1,7 @@ + + +### ENDS_WITH +Returns a boolean that indicates whether a keyword string ends with another string + diff --git a/docs/reference/esql/functions/kibana/docs/floor.md b/docs/reference/esql/functions/kibana/docs/floor.md new file mode 100644 index 0000000000000..a0a095525e08d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/floor.md @@ -0,0 +1,7 @@ + + +### FLOOR +Round a number down to the nearest integer. + diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md new file mode 100644 index 0000000000000..3db0c9ed87aa5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -0,0 +1,7 @@ + + +### GREATEST +Returns the maximum value from many columns. 
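The generated doc for GREATEST stops at the description, while most neighboring docs carry a usage block. A minimal ESQL sketch in the same style (the column names and values here are invented for illustration, not generator output):

```
ROW a = 10, b = 20
| EVAL g = GREATEST(a, b)
```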
+ diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md new file mode 100644 index 0000000000000..ff2c19592c8e1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -0,0 +1,7 @@ + + +### LEAST +Returns the minimum value from many columns. + diff --git a/docs/reference/esql/functions/kibana/docs/left.md b/docs/reference/esql/functions/kibana/docs/left.md new file mode 100644 index 0000000000000..73b79f7976512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/left.md @@ -0,0 +1,14 @@ + + +### LEFT +Returns the substring that extracts 'length' chars from 'string' starting from the left. + +``` +FROM employees +| KEEP last_name +| EVAL left = LEFT(last_name, 3) +| SORT last_name ASC +| LIMIT 5 +``` diff --git a/docs/reference/esql/functions/kibana/docs/length.md b/docs/reference/esql/functions/kibana/docs/length.md new file mode 100644 index 0000000000000..bb1cefd390c71 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/length.md @@ -0,0 +1,7 @@ + + +### LENGTH +Returns the character length of a string. + diff --git a/docs/reference/esql/functions/kibana/docs/locate.md b/docs/reference/esql/functions/kibana/docs/locate.md new file mode 100644 index 0000000000000..0b4d4c625c17e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/locate.md @@ -0,0 +1,7 @@ + + +### LOCATE +Returns an integer that indicates the position of a keyword substring within another string + diff --git a/docs/reference/esql/functions/kibana/docs/log.md b/docs/reference/esql/functions/kibana/docs/log.md new file mode 100644 index 0000000000000..7ac136d31f720 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log.md @@ -0,0 +1,13 @@ + + +### LOG +Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. + +Logs of zero, negative numbers, and base of one return `null` as well as a warning. + +``` +ROW base = 2.0, value = 8.0 +| EVAL s = LOG(base, value) +``` diff --git a/docs/reference/esql/functions/kibana/docs/log10.md b/docs/reference/esql/functions/kibana/docs/log10.md new file mode 100644 index 0000000000000..23ec30643e51e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log10.md @@ -0,0 +1,13 @@ + + +### LOG10 +Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. + +Logs of 0 and negative numbers return `null` as well as a warning. + +``` +ROW d = 1000.0 +| EVAL s = LOG10(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/ltrim.md b/docs/reference/esql/functions/kibana/docs/ltrim.md new file mode 100644 index 0000000000000..33fe7b8da1b6f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ltrim.md @@ -0,0 +1,7 @@ + + +### LTRIM +Removes leading whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_avg.md b/docs/reference/esql/functions/kibana/docs/mv_avg.md new file mode 100644 index 0000000000000..73636e07fa6e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_avg.md @@ -0,0 +1,7 @@ + + +### MV_AVG +Converts a multivalued field into a single valued field containing the average of all of the values. 
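MV_AVG's doc likewise ends without a snippet. A plausible illustration, assuming a multivalued literal as in the other mv_* docs:

```
ROW a = [3, 5, 1, 6]
| EVAL avg_a = MV_AVG(a)
```

This should yield `avg_a = 3.75`, since the result is always a double.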
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_concat.md b/docs/reference/esql/functions/kibana/docs/mv_concat.md new file mode 100644 index 0000000000000..f8092e47aaed0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_concat.md @@ -0,0 +1,7 @@ + + +### MV_CONCAT +Reduce a multivalued string field to a single valued field by concatenating all values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_count.md b/docs/reference/esql/functions/kibana/docs/mv_count.md new file mode 100644 index 0000000000000..ceea555d0d05c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_count.md @@ -0,0 +1,7 @@ + + +### MV_COUNT +Reduce a multivalued field to a single valued field containing the count of values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_dedupe.md b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md new file mode 100644 index 0000000000000..6968c4dd9b3a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md @@ -0,0 +1,7 @@ + + +### MV_DEDUPE +Remove duplicate values from a multivalued field. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_first.md b/docs/reference/esql/functions/kibana/docs/mv_first.md new file mode 100644 index 0000000000000..6ed8bb7570a93 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_first.md @@ -0,0 +1,7 @@ + + +### MV_FIRST +Reduce a multivalued field to a single valued field containing the first value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_last.md b/docs/reference/esql/functions/kibana/docs/mv_last.md new file mode 100644 index 0000000000000..5b68b84b4393f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_last.md @@ -0,0 +1,7 @@ + + +### MV_LAST +Reduce a multivalued field to a single valued field containing the last value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_max.md b/docs/reference/esql/functions/kibana/docs/mv_max.md new file mode 100644 index 0000000000000..acb29f7a592f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_max.md @@ -0,0 +1,7 @@ + + +### MV_MAX +Reduce a multivalued field to a single valued field containing the maximum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_median.md b/docs/reference/esql/functions/kibana/docs/mv_median.md new file mode 100644 index 0000000000000..81de2c3b2c689 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_median.md @@ -0,0 +1,7 @@ + + +### MV_MEDIAN +Converts a multivalued field into a single valued field containing the median value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_min.md b/docs/reference/esql/functions/kibana/docs/mv_min.md new file mode 100644 index 0000000000000..637211487a972 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_min.md @@ -0,0 +1,7 @@ + + +### MV_MIN +Reduce a multivalued field to a single valued field containing the minimum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_slice.md b/docs/reference/esql/functions/kibana/docs/mv_slice.md new file mode 100644 index 0000000000000..7bbf36f67079d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_slice.md @@ -0,0 +1,7 @@ + + +### MV_SLICE +Returns a subset of the multivalued field using the start and end index values. 
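For MV_SLICE, a sketch covering both the two-argument and three-argument forms (values invented; the indices address positions inside the multivalue):

```
ROW a = [1, 2, 2, 3]
| EVAL a1 = MV_SLICE(a, 1), a2 = MV_SLICE(a, 2, 3)
```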
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_sort.md b/docs/reference/esql/functions/kibana/docs/mv_sort.md new file mode 100644 index 0000000000000..65a74d0455f4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sort.md @@ -0,0 +1,7 @@ + + +### MV_SORT +Sorts a multivalued field in lexicographical order. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_sum.md b/docs/reference/esql/functions/kibana/docs/mv_sum.md new file mode 100644 index 0000000000000..a2b1bfb8ac481 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sum.md @@ -0,0 +1,7 @@ + + +### MV_SUM +Converts a multivalued field into a single valued field containing the sum of all of the values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_zip.md b/docs/reference/esql/functions/kibana/docs/mv_zip.md new file mode 100644 index 0000000000000..b6de218ecb45b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_zip.md @@ -0,0 +1,7 @@ + + +### MV_ZIP +Combines the values from two multivalued fields with a delimiter that joins them together. + diff --git a/docs/reference/esql/functions/kibana/docs/pi.md b/docs/reference/esql/functions/kibana/docs/pi.md new file mode 100644 index 0000000000000..f796ace56607d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pi.md @@ -0,0 +1,7 @@ + + +### PI +The ratio of a circle’s circumference to its diameter. + diff --git a/docs/reference/esql/functions/kibana/docs/pow.md b/docs/reference/esql/functions/kibana/docs/pow.md new file mode 100644 index 0000000000000..6cb9139dd91cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pow.md @@ -0,0 +1,7 @@ + + +### POW +Returns the value of a base raised to the power of an exponent. + diff --git a/docs/reference/esql/functions/kibana/docs/replace.md b/docs/reference/esql/functions/kibana/docs/replace.md new file mode 100644 index 0000000000000..9744a9ad7244b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/replace.md @@ -0,0 +1,7 @@ + + +### REPLACE +The function substitutes in the string any match of the regular expression with the replacement string. + diff --git a/docs/reference/esql/functions/kibana/docs/right.md b/docs/reference/esql/functions/kibana/docs/right.md new file mode 100644 index 0000000000000..6e211ae079f62 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/right.md @@ -0,0 +1,7 @@ + + +### RIGHT +Return the substring that extracts length chars from the string starting from the right. + diff --git a/docs/reference/esql/functions/kibana/docs/round.md b/docs/reference/esql/functions/kibana/docs/round.md new file mode 100644 index 0000000000000..2f8fd0864badf --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/round.md @@ -0,0 +1,7 @@ + + +### ROUND +Rounds a number to the closest number with the specified number of digits. + diff --git a/docs/reference/esql/functions/kibana/docs/rtrim.md b/docs/reference/esql/functions/kibana/docs/rtrim.md new file mode 100644 index 0000000000000..fc5636e40e804 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/rtrim.md @@ -0,0 +1,7 @@ + + +### RTRIM +Removes trailing whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/signum.md b/docs/reference/esql/functions/kibana/docs/signum.md new file mode 100644 index 0000000000000..f2e66b84c69c8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/signum.md @@ -0,0 +1,12 @@ + + +### SIGNUM +Returns the sign of the given number. 
+It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. + +``` +ROW d = 100.0 +| EVAL s = SIGNUM(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sin.md b/docs/reference/esql/functions/kibana/docs/sin.md new file mode 100644 index 0000000000000..a87b4e4f452af --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sin.md @@ -0,0 +1,11 @@ + + +### SIN +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL sin=SIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sinh.md b/docs/reference/esql/functions/kibana/docs/sinh.md new file mode 100644 index 0000000000000..81e8d9fd473d5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sinh.md @@ -0,0 +1,11 @@ + + +### SINH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. + +``` +ROW a=1.8 +| EVAL sinh=SINH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/split.md b/docs/reference/esql/functions/kibana/docs/split.md new file mode 100644 index 0000000000000..d06d8857967f4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/split.md @@ -0,0 +1,7 @@ + + +### SPLIT +Split a single valued string into multiple strings. + diff --git a/docs/reference/esql/functions/kibana/docs/sqrt.md b/docs/reference/esql/functions/kibana/docs/sqrt.md new file mode 100644 index 0000000000000..6e52bfed4037b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sqrt.md @@ -0,0 +1,7 @@ + + +### SQRT +Returns the square root of a number. + diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md new file mode 100644 index 0000000000000..6e23bb9b0f116 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -0,0 +1,12 @@ + + +### ST_CONTAINS +Returns whether the first geometry contains the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md new file mode 100644 index 0000000000000..7cf66b168bd70 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -0,0 +1,12 @@ + + +### ST_DISJOINT +Returns whether the two geometries or geometry columns are disjoint. + +``` +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md new file mode 100644 index 0000000000000..e4db33429dbe3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -0,0 +1,11 @@ + + +### ST_INTERSECTS +Returns whether the two geometries or geometry columns intersect. 
+ +``` +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md new file mode 100644 index 0000000000000..cbb3ae5ee9aca --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -0,0 +1,12 @@ + + +### ST_WITHIN +Returns whether the first geometry is within the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md new file mode 100644 index 0000000000000..af2f4de1487cd --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -0,0 +1,7 @@ + + +### ST_X +Extracts the x-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md new file mode 100644 index 0000000000000..575a5bd3c7d33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -0,0 +1,7 @@ + + +### ST_Y +Extracts the y-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/starts_with.md b/docs/reference/esql/functions/kibana/docs/starts_with.md new file mode 100644 index 0000000000000..5af544c855051 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/starts_with.md @@ -0,0 +1,7 @@ + + +### STARTS_WITH +Returns a boolean that indicates whether a keyword string starts with another string + diff --git a/docs/reference/esql/functions/kibana/docs/substring.md b/docs/reference/esql/functions/kibana/docs/substring.md new file mode 100644 index 0000000000000..d1d9c696f7813 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/substring.md @@ -0,0 +1,7 @@ + + +### SUBSTRING +Returns a substring of a string, specified by a start position and an optional length + diff --git a/docs/reference/esql/functions/kibana/docs/tan.md b/docs/reference/esql/functions/kibana/docs/tan.md new file mode 100644 index 0000000000000..edfb4210f7dd2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tan.md @@ -0,0 +1,11 @@ + + +### TAN +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL tan=TAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tanh.md b/docs/reference/esql/functions/kibana/docs/tanh.md new file mode 100644 index 0000000000000..d3d8c7d4e9196 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tanh.md @@ -0,0 +1,11 @@ + + +### TANH +Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle. + +``` +ROW a=1.8 +| EVAL tanh=TANH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tau.md b/docs/reference/esql/functions/kibana/docs/tau.md new file mode 100644 index 0000000000000..9a530e61dd342 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tau.md @@ -0,0 +1,7 @@ + + +### TAU +The ratio of a circle’s circumference to its radius. 
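TAU takes no arguments, so the smallest demonstration is evaluating the constant itself into a column:

```
ROW TAU()
```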
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_boolean.md b/docs/reference/esql/functions/kibana/docs/to_boolean.md new file mode 100644 index 0000000000000..9c1bd747d168f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_boolean.md @@ -0,0 +1,7 @@ + + +### TO_BOOLEAN +Converts an input value to a boolean value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md new file mode 100644 index 0000000000000..dbaa76d1d23e0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANPOINT +Converts an input value to a point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md new file mode 100644 index 0000000000000..e3fd29e8f9907 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANSHAPE +Converts an input value to a shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_datetime.md b/docs/reference/esql/functions/kibana/docs/to_datetime.md new file mode 100644 index 0000000000000..8326866c7166d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_datetime.md @@ -0,0 +1,7 @@ + + +### TO_DATETIME +Converts an input value to a date value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_degrees.md b/docs/reference/esql/functions/kibana/docs/to_degrees.md new file mode 100644 index 0000000000000..dc5e36a592b2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_degrees.md @@ -0,0 +1,7 @@ + + +### TO_DEGREES +Converts a number in radians to degrees. + diff --git a/docs/reference/esql/functions/kibana/docs/to_double.md b/docs/reference/esql/functions/kibana/docs/to_double.md new file mode 100644 index 0000000000000..4f531e1c8fdde --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_double.md @@ -0,0 +1,7 @@ + + +### TO_DOUBLE +Converts an input value to a double value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geopoint.md b/docs/reference/esql/functions/kibana/docs/to_geopoint.md new file mode 100644 index 0000000000000..7f9b8ca59bc8f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geopoint.md @@ -0,0 +1,7 @@ + + +### TO_GEOPOINT +Converts an input value to a geo_point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geoshape.md b/docs/reference/esql/functions/kibana/docs/to_geoshape.md new file mode 100644 index 0000000000000..cdfbdc5b6ffd9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geoshape.md @@ -0,0 +1,7 @@ + + +### TO_GEOSHAPE +Converts an input value to a geo_shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_integer.md b/docs/reference/esql/functions/kibana/docs/to_integer.md new file mode 100644 index 0000000000000..ad04ecbd1e304 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_integer.md @@ -0,0 +1,7 @@ + + +### TO_INTEGER +Converts an input value to an integer value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_ip.md b/docs/reference/esql/functions/kibana/docs/to_ip.md new file mode 100644 index 0000000000000..47d06e9ab755e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_ip.md @@ -0,0 +1,7 @@ + + +### TO_IP +Converts an input string to an IP value. 
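TO_IP's doc also lacks an example. A hedged sketch (the address literal is invented; a string that does not parse as an IP would instead produce null and a warning):

```
ROW str = "1.1.1.1"
| EVAL ip = TO_IP(str)
```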
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_long.md b/docs/reference/esql/functions/kibana/docs/to_long.md new file mode 100644 index 0000000000000..c19273376bd4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_long.md @@ -0,0 +1,7 @@ + + +### TO_LONG +Converts an input value to a long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_lower.md b/docs/reference/esql/functions/kibana/docs/to_lower.md new file mode 100644 index 0000000000000..f63926ba13825 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_lower.md @@ -0,0 +1,7 @@ + + +### TO_LOWER +Returns a new string representing the input string converted to lower case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_radians.md b/docs/reference/esql/functions/kibana/docs/to_radians.md new file mode 100644 index 0000000000000..071d9ff05e0b6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_radians.md @@ -0,0 +1,7 @@ + + +### TO_RADIANS +Converts a number in degrees to radians. + diff --git a/docs/reference/esql/functions/kibana/docs/to_string.md b/docs/reference/esql/functions/kibana/docs/to_string.md new file mode 100644 index 0000000000000..a066f488363aa --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_string.md @@ -0,0 +1,7 @@ + + +### TO_STRING +Converts a field into a string. + diff --git a/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md new file mode 100644 index 0000000000000..fbe9e22215ee8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md @@ -0,0 +1,7 @@ + + +### TO_UNSIGNED_LONG +Converts an input value to an unsigned long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_upper.md b/docs/reference/esql/functions/kibana/docs/to_upper.md new file mode 100644 index 0000000000000..4c4f5fe02b646 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_upper.md @@ -0,0 +1,7 @@ + + +### TO_UPPER +Returns a new string representing the input string converted to upper case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_version.md b/docs/reference/esql/functions/kibana/docs/to_version.md new file mode 100644 index 0000000000000..23cd9fcb152a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_version.md @@ -0,0 +1,7 @@ + + +### TO_VERSION +Converts an input string to a version value. + diff --git a/docs/reference/esql/functions/kibana/docs/trim.md b/docs/reference/esql/functions/kibana/docs/trim.md new file mode 100644 index 0000000000000..2911abbf5e1a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/trim.md @@ -0,0 +1,7 @@ + + +### TRIM +Removes leading and trailing whitespaces from a string. 
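TRIM's doc ends at its one-line description as well; an illustrative query in the same style (the padded string is made up):

```
ROW message = "   some text  "
| EVAL message = TRIM(message)
```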
+ diff --git a/docs/reference/esql/functions/signature/case.svg b/docs/reference/esql/functions/signature/case.svg new file mode 100644 index 0000000000000..d6fd7da38aca6 --- /dev/null +++ b/docs/reference/esql/functions/signature/case.svg @@ -0,0 +1 @@ +CASE(condition,trueValue) \ No newline at end of file diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 87ef4dd0b3eff..86245e1c93e97 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -70,27 +70,50 @@ tasks.named("test").configure { doLast { List signatures = file("${projectDir}/build/testrun/test/temp/esql/functions/signature").list().findAll {it.endsWith("svg")} List types = file("${projectDir}/build/testrun/test/temp/esql/functions/types").list().findAll {it.endsWith("asciidoc")} - int count = signatures == null ? 0 : signatures.size() + int count = types == null ? 0 : types.size() + Closure readExample = line -> { + line.replaceAll(/read-example::([^\[]+)\[tag=([^,\]]+)(, ?json)?\]/, { + String file = it[1] + String tag = it[2] + boolean isJson = it[3] + String allExamples = new File("${projectDir}/qa/testFixtures/src/main/resources/${file}").text + int start = allExamples.indexOf("tag::${tag}[]") + int end = allExamples.indexOf("end::${tag}[]", start) + if (start < 0 || end < 0) { + throw new IllegalAccessException("can't find example ${file}::${tag}") + } + // Slice out the newlines + start = allExamples.indexOf('\n', start) + 1 + end = allExamples.lastIndexOf('\n', end) + String example = allExamples.substring(start, end) + if (isJson) { + example = example.replace("\"", "\\\"").replace("\n", "\\n") + } + return example; + }) + } if (count == 0) { logger.quiet("ESQL Docs: No function signatures created. Skipping sync.") } else if (count == 1) { - logger.quiet("ESQL Docs: Only updated $signatures and $types, patching them into place") + logger.quiet("ESQL Docs: Only files related to $types, patching them into place") project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { - include '/*.asciidoc', '**/*.asciidoc', '**/*.svg', 'README.md' + include '/*.asciidoc', '**/*.asciidoc', '**/*.md', '**/*.json', '**/*.svg', 'README.md' } + filter readExample } } else { project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { include '/*.asciidoc', 'README.md' } + filter readExample } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 178c714950b05..a1a7c95ece2f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -108,6 +108,7 @@ import java.lang.reflect.Constructor; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -245,7 +246,21 @@ public static String normalizeName(String name) { return name.toLowerCase(Locale.ROOT); } - public record ArgSignature(String name, String[] type, String description, boolean 
optional) {} + public record ArgSignature(String name, String[] type, String description, boolean optional) { + @Override + public String toString() { + return "ArgSignature{" + + "name='" + + name + + "', type=" + + Arrays.toString(type) + + ", description='" + + description + + "', optional=" + + optional + + '}'; + } + } public record FunctionDescription( String name, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 889dfbf4c9b17..bc7a67d9eaefa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function; +import com.carrotsearch.randomizedtesting.ClassModel; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; @@ -33,7 +36,9 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -61,14 +66,11 @@ import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.rules.TestRule; -import org.junit.runner.Description; -import org.junit.runners.model.Statement; import java.io.IOException; import java.io.UncheckedIOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -99,6 +101,7 @@ import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -524,45 +527,28 @@ public void testSerializationOfSimple() { assertSerialization(buildFieldExpression(testCase)); } - private static boolean ranAllTests = false; - - @ClassRule - public static TestRule rule = new TestRule() { - @Override - public Statement apply(Statement base, Description description) { - for (Description d : description.getChildren()) { - if (d.getChildren().size() > 1) { - ranAllTests = true; - return base; - } - } - return base; - } - }; - @AfterClass public static void testFunctionInfo() { - if (ranAllTests == false) { - LogManager.getLogger(getTestClass()).info("Skipping function info checks because we're running a portion of the tests"); - return; - } + Logger log = LogManager.getLogger(getTestClass()); FunctionDefinition definition = definition(functionName()); if (definition == null) { - LogManager.getLogger(getTestClass()).info("Skipping function info checks because the function isn't registered"); + log.info("Skipping 
function info checks because the function isn't registered"); return; } - LogManager.getLogger(getTestClass()).info("Running function info checks"); + // TODO fix case tests to include all supported types + assumeFalse("CASE test incomplete", definition.name().equals("case")); + log.info("Running function info checks"); EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); List<EsqlFunctionRegistry.ArgSignature> args = description.args(); - assertTrue("expect description to be defined", description.description() != null && description.description().length() > 0); + assertTrue("expect description to be defined", description.description() != null && false == description.description().isEmpty()); List<Set<String>> typesFromSignature = new ArrayList<>(); Set<String> returnFromSignature = new HashSet<>(); for (int i = 0; i < args.size(); i++) { typesFromSignature.add(new HashSet<>()); } - for (Map.Entry<List<DataType>, DataType> entry : signatures.entrySet()) { + for (Map.Entry<List<DataType>, DataType> entry : signatures().entrySet()) { List<DataType> types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { typesFromSignature.get(i).add(signatureType(types.get(i))); } @@ -571,15 +557,18 @@ public static void testFunctionInfo() { for (int i = 0; i < args.size(); i++) { - Set<String> annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>())); + EsqlFunctionRegistry.ArgSignature arg = args.get(i); + Set<String> annotationTypes = Arrays.stream(arg.type()).collect(Collectors.toCollection(TreeSet::new)); Set<String> signatureTypes = typesFromSignature.get(i); if (signatureTypes.isEmpty()) { + log.info("{}: skipping", arg.name()); continue; } + log.info("{}: tested {} vs annotated {}", arg.name(), signatureTypes, annotationTypes); assertEquals(signatureTypes, annotationTypes); } - Set<String> returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>())); + Set<String> returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); } @@ -994,10 +983,6 @@ public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { return; } - if (ranAllTests == false) { - LogManager.getLogger(getTestClass()).info("Skipping rendering signature because we're running a portion of the tests"); - return; - } String rendered = buildSignatureSvg(functionName()); if (rendered == null) { LogManager.getLogger(getTestClass()).info("Skipping rendering signature because the function isn't registered"); @@ -1023,37 +1008,39 @@ private static String buildSignatureSvg(String name) throws IOException { return null; } + private static Class<?> classGeneratingSignatures = null; /** - * Unique signatures encountered by this test. - * <p> - * We clear this at the beginning of the test class with - * {@link #clearSignatures} out of paranoia. It is - * shared by many tests, after all. - * </p> - * <p> - * After each test method we add the signature it operated on via - * {@link #trackSignature}. Once the test class is done we render - * all the unique signatures to a temp file with {@link #renderTypes}. - * We use a temp file because that's all we're allowed to write to. - * Gradle will move the files into the docs after this is done. - * </p>
    + * Unique signatures in this test's parameters. */ - private static final Map, DataType> signatures = new HashMap<>(); - - @BeforeClass - public static void clearSignatures() { - signatures.clear(); - } + private static Map, DataType> signatures; - @After - public void trackSignature() { - if (testCase.getExpectedTypeError() != null) { - return; + private static Map, DataType> signatures() { + Class testClass = getTestClass(); + if (signatures != null && classGeneratingSignatures == testClass) { + return signatures; } - if (testCase.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { - return; + signatures = new HashMap<>(); + Set paramsFactories = new ClassModel(testClass).getAnnotatedLeafMethods(ParametersFactory.class).keySet(); + assertThat(paramsFactories, hasSize(1)); + Method paramsFactory = paramsFactories.iterator().next(); + List params; + try { + params = (List) paramsFactory.invoke(null); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); } - signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType()); + for (Object p : params) { + TestCaseSupplier tcs = (TestCaseSupplier) ((Object[]) p)[0]; + TestCaseSupplier.TestCase tc = tcs.get(); + if (tc.getExpectedTypeError() != null) { + continue; + } + if (tc.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { + continue; + } + signatures.putIfAbsent(tc.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), tc.expectedType()); + } + return signatures; } @AfterClass @@ -1079,6 +1066,17 @@ public static void renderDocs() throws IOException { renderDescription(description.description(), info.note()); boolean hasExamples = renderExamples(info); renderFullLayout(name, hasExamples); + renderKibanaInlineDocs(name, info); + List args = description.args(); + if (name.equals("case")) { + EsqlFunctionRegistry.ArgSignature falseValue = args.get(1); + args = List.of( + args.get(0), + falseValue, + new EsqlFunctionRegistry.ArgSignature("falseValue", falseValue.type(), falseValue.description(), true) + ); + } + renderKibanaFunctionDefinition(name, info, args, description.variadic()); return; } LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); @@ -1095,7 +1093,7 @@ private static void renderTypes(List argNames) throws IOException { header.append("result"); List table = new ArrayList<>(); - for (Map.Entry, DataType> sig : signatures.entrySet()) { + for (Map.Entry, DataType> sig : signatures().entrySet()) { // TODO flip to using sortedSignatures if (sig.getKey().size() != argNames.size()) { continue; } @@ -1198,6 +1196,130 @@ private static void renderFullLayout(String name, boolean hasExamples) throws IO writeToTempDir("layout", rendered, "asciidoc"); } + private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append(""" + + + """); + builder.append("### ").append(name.toUpperCase(Locale.ROOT)).append("\n"); + builder.append(info.description()).append("\n\n"); + + if (info.examples().length > 0) { + Example example = info.examples()[0]; + builder.append("```\n"); + builder.append("read-example::").append(example.file()).append(".csv-spec[tag=").append(example.tag()).append("]\n"); + builder.append("```\n"); + } + if (Strings.isNullOrEmpty(info.note()) == false) { + builder.append("Note: ").append(info.note()).append("\n"); + } + 
+        String rendered = builder.toString();
+        LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", functionName(), rendered);
+        writeToTempDir("kibana/docs", rendered, "md");
+    }
+
+    private static void renderKibanaFunctionDefinition(
+        String name,
+        FunctionInfo info,
+        List<EsqlFunctionRegistry.ArgSignature> args,
+        boolean variadic
+    ) throws IOException {
+
+        XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint().lfAtEnd().startObject();
+        builder.field(
+            "comment",
+            "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it."
+        );
+        builder.field("type", "eval"); // TODO aggs in here too
+        builder.field("name", name);
+        builder.field("description", removeAsciidocLinks(info.description()));
+        if (Strings.isNullOrEmpty(info.note()) == false) {
+            builder.field("note", removeAsciidocLinks(info.note()));
+        }
+        // TODO aliases
+
+        builder.startArray("signatures");
+        if (args.isEmpty()) {
+            builder.startObject();
+            builder.startArray("params");
+            builder.endArray();
+            // There should only be one return type so just use that as the example
+            builder.field("returnType", signatures().values().iterator().next().typeName());
+            builder.endObject();
+        } else {
+            int minArgCount = (int) args.stream().filter(a -> false == a.optional()).count();
+            for (Map.Entry<List<DataType>, DataType> sig : sortedSignatures()) {
+                if (variadic && sig.getKey().size() > args.size()) {
+                    // For variadic functions we test much longer signatures, let's just stop at the last one
+                    continue;
+                }
+                // TODO make constants for auto_bucket so the signatures get recognized
+                if (name.equals("auto_bucket") == false && sig.getKey().size() < minArgCount) {
+                    throw new IllegalArgumentException("signature " + sig.getKey() + " is missing non-optional arg for " + args);
+                }
+                builder.startObject();
+                builder.startArray("params");
+                for (int i = 0; i < sig.getKey().size(); i++) {
+                    EsqlFunctionRegistry.ArgSignature arg = args.get(i);
+                    builder.startObject();
+                    builder.field("name", arg.name());
+                    builder.field("type", sig.getKey().get(i).typeName());
+                    builder.field("optional", arg.optional());
+                    builder.field("description", arg.description());
+                    builder.endObject();
+                }
+                builder.endArray();
+                builder.field("variadic", variadic);
+                builder.field("returnType", sig.getValue().typeName());
+                builder.endObject();
+            }
+        }
+        builder.endArray();
+
+        if (info.examples().length > 0) {
+            builder.startArray("examples");
+            for (Example example : info.examples()) {
+                builder.value("read-example::" + example.file() + ".csv-spec[tag=" + example.tag() + ", json]");
+            }
+            builder.endArray();
+        }
+
+        String rendered = Strings.toString(builder.endObject());
+        LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", functionName(), rendered);
+        writeToTempDir("kibana/definition", rendered, "json");
+    }
+
+    private static String removeAsciidocLinks(String asciidoc) {
+        return asciidoc.replaceAll("[^ ]+\\[([^\\]]+)\\]", "$1");
+    }
+
+    private static List<Map.Entry<List<DataType>, DataType>> sortedSignatures() {
+        List<Map.Entry<List<DataType>, DataType>> sortedSignatures = new ArrayList<>(signatures().entrySet());
+        Collections.sort(sortedSignatures, new Comparator<>() {
+            @Override
+            public int compare(Map.Entry<List<DataType>, DataType> lhs, Map.Entry<List<DataType>, DataType> rhs) {
+                int maxlen = Math.max(lhs.getKey().size(), rhs.getKey().size());
+                for (int i = 0; i < maxlen; i++) {
+                    if (lhs.getKey().size() <= i) {
+                        return -1;
+                    }
+                    if (rhs.getKey().size() <= i) {
+                        return 1;
+                    }
+                    int c = lhs.getKey().get(i).typeName().compareTo(rhs.getKey().get(i).typeName());
+                    if (c != 0) {
+                        return c;
+                    }
+                }
+                return lhs.getValue().typeName().compareTo(rhs.getValue().typeName());
+            }
+        });
+        return sortedSignatures;
+    }
+
     protected static String functionName() {
         Class<?> testClass = getTestClass();
         if (testClass.isAnnotationPresent(FunctionName.class)) {
@@ -1262,7 +1384,7 @@ private static void writeToTempDir(String subdir, String str, String extension)
         Files.createDirectories(dir);
         Path file = dir.resolve(functionName() + "." + extension);
         Files.writeString(file, str);
-        LogManager.getLogger(getTestClass()).info("Wrote function types for [{}] to file: {}", functionName(), file);
+        LogManager.getLogger(getTestClass()).info("Wrote to file: {}", file);
     }
 
     private final List<CircuitBreaker> breakers = Collections.synchronizedList(new ArrayList<>());
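As an aside, the behavior of the removeAsciidocLinks helper added above is easiest to see with a small standalone sketch. Only the regex comes from the patch; the class name and sample string below are hypothetical:

    // Demonstrates the regex used by removeAsciidocLinks: any "target[label]"
    // asciidoc reference collapses to its bare label.
    public class RemoveAsciidocLinksDemo {
        public static void main(String[] args) {
            String asciidoc = "Returns the {wikipedia}/Logarithm[natural logarithm] of a value.";
            String plain = asciidoc.replaceAll("[^ ]+\\[([^\\]]+)\\]", "$1");
            System.out.println(plain); // -> Returns the natural logarithm of a value.
        }
    }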
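The one-line fix in the next patch is easiest to see in isolation. A minimal sketch, assuming a hypothetical trimmed-down stand-in for DataStream (the real constructor takes many more arguments): rolling over the failure store copies the data stream, and the copy must carry the existing rolloverOnWrite flag instead of hard-coding false.

    // Hypothetical simplification of DataStream: a failure-store rollover bumps
    // the generation but must preserve the rolloverOnWrite flag.
    record DataStreamState(long generation, boolean rolloverOnWrite) {
        DataStreamState rolloverFailureStore() {
            // Before the fix this passed `false`, silently clearing a pending
            // lazy rollover of the backing indices.
            return new DataStreamState(generation + 1, rolloverOnWrite);
        }
    }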
---
 .../java/org/elasticsearch/cluster/metadata/DataStream.java | 2 +-
 .../org/elasticsearch/cluster/metadata/DataStreamTests.java | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
index 364a1b31ceeba..d4fd57427793b 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java
@@ -527,7 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation)
             lifecycle,
             failureStore,
             failureIndices,
-            false,
+            rolloverOnWrite,
             autoShardingEvent
         );
     }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java
index f086b52c1b491..d54fcbd8a9e41 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java
@@ -177,6 +177,8 @@ public void testRollover() {
         assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1));
         assertTrue(rolledDs.getIndices().containsAll(ds.getIndices()));
         assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex()));
+        // Irrespective of whether the rollover was performed lazily, rolloverOnWrite should always be set to false after rollover.
+        assertFalse(rolledDs.rolloverOnWrite());
     }
 
     public void testRolloverWithConflictingBackingIndexName() {
@@ -272,6 +274,8 @@ public void testRolloverFailureStore() {
         assertThat(rolledDs.getName(), equalTo(ds.getName()));
         assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1));
         assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size()));
+        // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store.
+        assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite()));
         assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1));
         assertTrue(rolledDs.getIndices().containsAll(ds.getIndices()));
         assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex()));

From 6ff6dc1fb4a344deea3ad8f858a2c051c8880bb8 Mon Sep 17 00:00:00 2001
From: Pat Whelan
Date: Tue, 9 Apr 2024 18:16:42 -0400
Subject: [PATCH 207/264] [Transform] Only trigger action once per thread
 (#107232)

TransformScheduler can trigger its tasks on multiple threads. TransformTask
uses an AtomicReference to manage one trigger event per thread by cycling
between "Started" and "Indexing". The Retry Listener now has the same
protection: "shouldRunAction" cycles to false during execution and back to
true if the action fails and should be retried.

Fix #107215
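A minimal sketch of the gate this commit message describes, with simplified, hypothetical names (the real TransformRetryableStartUpListener reopens the gate from an asynchronous ActionListener failure callback rather than a try/catch, and also notifies retry and response listeners):

    import java.util.concurrent.atomic.AtomicBoolean;

    // compareAndSet(true, false) lets exactly one concurrent trigger run the
    // action; the gate only reopens when the action fails and should be retried.
    class TriggerOnceGate {
        private final AtomicBoolean shouldRunAction = new AtomicBoolean(true);

        void triggered(Runnable action) {
            if (shouldRunAction.compareAndSet(true, false)) {
                try {
                    action.run(); // success: gate stays closed, no duplicate runs
                } catch (RuntimeException e) {
                    shouldRunAction.set(true); // failure: allow the next trigger to retry
                }
            }
        }
    }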
---
 docs/changelog/107232.yaml                    |  6 ++
 .../TransformRetryableStartUpListener.java    | 21 +++---
 ...ransformRetryableStartUpListenerTests.java | 64 +++++++++++++++++++
 3 files changed, 80 insertions(+), 11 deletions(-)
 create mode 100644 docs/changelog/107232.yaml

diff --git a/docs/changelog/107232.yaml b/docs/changelog/107232.yaml
new file mode 100644
index 0000000000000..1422848cb1c91
--- /dev/null
+++ b/docs/changelog/107232.yaml
@@ -0,0 +1,6 @@
+pr: 107232
+summary: Only trigger action once per thread
+area: Transform
+type: bug
+issues:
+ - 107215
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
index 17548fd8d427f..33b20d5513bc5 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java
@@ -22,7 +22,7 @@ class TransformRetryableStartUpListener<Response> implements TransformScheduler.
     private final Supplier<Boolean> shouldRetry;
     private final TransformContext context;
     private final AtomicBoolean isFirstRun;
-    private final AtomicBoolean isRunning;
+    private final AtomicBoolean shouldRunAction;
 
     /**
      * @param transformId the transform associated with this listener. All events to this listener must be for the same transformId.
@@ -53,30 +53,28 @@ class TransformRetryableStartUpListener<Response> implements TransformScheduler.
         this.shouldRetry = shouldRetry;
         this.context = context;
         this.isFirstRun = new AtomicBoolean(true);
-        this.isRunning = new AtomicBoolean(true);
+        this.shouldRunAction = new AtomicBoolean(true);
     }
 
     @Override
     public void triggered(TransformScheduler.Event event) {
-        if (isRunning.get() && transformId.equals(event.transformId())) {
+        if (transformId.equals(event.transformId()) && shouldRunAction.compareAndSet(true, false)) {
             action.accept(ActionListener.wrap(this::actionSucceeded, this::actionFailed));
         }
     }
 
-    private void markDone() {
-        if (isRunning.compareAndSet(true, false)) {
-            synchronized (context) {
-                context.resetStartUpFailureCount();
-            }
-        }
-    }
-
     private void actionSucceeded(Response r) {
         maybeNotifyRetryListener(false);
         markDone();
         actionListener.onResponse(r);
     }
 
+    private void markDone() {
+        synchronized (context) {
+            context.resetStartUpFailureCount();
+        }
+    }
+
     private void maybeNotifyRetryListener(boolean response) {
         if (isFirstRun.compareAndSet(true, false)) {
             retryScheduledListener.onResponse(response);
@@ -87,6 +85,7 @@ private void actionFailed(Exception e) {
         if (shouldRetry.get()) {
             maybeNotifyRetryListener(true);
             recordError(e);
+            shouldRunAction.set(true);
         } else {
             maybeNotifyRetryListener(false);
             markDone();
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java
index 1a2bbfd434455..77b290e015d9a 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java
@@ -18,6 +18,7 @@ import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.only;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -236,4 +237,67 @@ public void testCancelRetryImmediately() {
         assertFalse("Retries should not be scheduled.", retryResult.get());
         verify(context, only()).resetStartUpFailureCount();
     }
+
+    /**
+     * Given triggered has been called
+     * When we call trigger a second time
+     * And the first call has not finished
+     * Then we should not take any action
+     *
+     * Given the first call has finished
+     * When we call trigger a third time
+     * Then we should successfully call the action
+     */
+    public void testRunOneAtATime() {
+        var retryResult = new AtomicReference<Boolean>();
+        var responseResult = new AtomicInteger(0);
+        var context = mock(TransformContext.class);
+
+        var savedListener = new AtomicReference<ActionListener<Void>>();
+        Consumer<ActionListener<Void>> action = l -> {
+            if (savedListener.compareAndSet(null, l) == false) {
+                fail("Action should only be called once.");
+            }
+        };
+
+        var listener = new TransformRetryableStartUpListener<>(
+            "transformId",
+            action,
+            responseListener(responseResult),
+            retryListener(retryResult),
+            () -> true,
+            context
+        );
+
+        callThreeTimes("transformId", listener);
+
+        // verify the action has been called
+        assertNotNull(savedListener.get());
+
+        // assert the listener has not been called yet
+        assertEquals("Response Listener should never be called once.", 0, responseResult.get());
+        assertNull("Retry Listener should not be called.", retryResult.get());
+        verifyNoInteractions(context);
+
+        savedListener.get().onFailure(new IllegalStateException("first call fails"));
+
+        // assert only 1 retry and 0 success
+        assertEquals("Response Listener should only be called once.", 0, responseResult.get());
+        assertNotNull("Retry Listener should be called.", retryResult.get());
+        assertTrue("Retries should be scheduled.", retryResult.get());
+        verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class));
+        verify(context, never()).resetStartUpFailureCount();
+
+        // rerun and succeed
+        savedListener.set(null);
+        callThreeTimes("transformId", listener);
+        savedListener.get().onResponse(null);
+
+        // assert only 1 retry and 1 success
+        assertEquals("Response Listener should only be called once.", 1, responseResult.get());
+        assertNotNull("Retry Listener should be called.", retryResult.get());
+        assertTrue("Retries should be scheduled.", retryResult.get());
+        verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class));
+        verify(context, times(1)).resetStartUpFailureCount();
+    }
 }

From 13f95fdcec25bafe699ca9d0130970d7d86a6a06 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer
Date: Wed, 10 Apr 2024 07:27:59 +0200
Subject: [PATCH 208/264] [Profiling] Divide into more packages (#107201)

With this commit we split the Universal Profiling plugin into three packages:

* `persistence` contains everything related to index management
* `rest` contains the REST API
* `action` contains the transport API

The `action` / `rest` structure follows the structure already established in
the rest of the code base. We divide this plugin into multiple packages mainly
because the different functionalities will be maintained by different teams in
the future. This restructuring helps clarify boundaries.
---
 .../{ => action}/CancellationIT.java          | 2 +-
 .../{ => action}/GetFlameGraphActionIT.java   | 2 +-
 .../{ => action}/GetStackTracesActionIT.java  | 2 +-
 .../{ => action}/GetStatusActionIT.java       | 2 +-
 .../GetTopNFunctionsActionIT.java             | 2 +-
 .../LocalStateProfilingXPackPlugin.java       | 3 +-
 .../{ => action}/ProfilingTestCase.java       | 4 ++-
 .../xpack/profiling/ProfilingPlugin.java      | 19 ++++++++++++
 .../profiling/{ => action}/CO2Calculator.java | 2 +-
 .../{ => action}/CloudProviders.java          | 2 +-
 .../{ => action}/CostCalculator.java          | 2 +-
 .../profiling/{ => action}/CostEntry.java     | 2 +-
 .../xpack/profiling/{ => action}/Frame.java   | 2 +-
 .../profiling/{ => action}/FrameGroupID.java  | 2 +-
 .../{ => action}/GetFlamegraphAction.java     | 2 +-
 .../{ => action}/GetFlamegraphResponse.java   | 2 +-
 .../{ => action}/GetStackTracesAction.java    | 2 +-
 .../{ => action}/GetStackTracesRequest.java   | 3 +-
 .../{ => action}/GetStackTracesResponse.java  | 2 +-
 .../GetStackTracesResponseBuilder.java        | 2 +-
 .../{ => action}/GetStatusAction.java         | 2 +-
 .../{ => action}/GetTopNFunctionsAction.java  | 2 +-
 .../GetTopNFunctionsResponse.java             | 2 +-
 .../profiling/{ => action}/HostMetadata.java  | 2 +-
 .../{ => action}/IndexAllocation.java         | 2 +-
 .../profiling/{ => action}/InstanceType.java  | 2 +-
 .../{ => action}/InstanceTypeService.java     | 2 +-
 .../{ => action}/KvIndexResolver.java         | 2 +-
 .../profiling/{ => action}/NumberUtils.java   | 2 +-
 .../ProfilingInfoTransportAction.java         | 2 +-
 .../{ => action}/ProfilingLicenseChecker.java | 2 +-
 .../ProfilingUsageTransportAction.java        | 2 +-
 .../profiling/{ => action}/Resampler.java     | 2 +-
 .../profiling/{ => action}/StackFrame.java    | 2 +-
 .../profiling/{ => action}/StackTrace.java    | 2 +-
 .../profiling/{ => action}/StopWatch.java     | 2 +-
 .../profiling/{ => action}/TopNFunction.java  | 2 +-
 .../profiling/{ =>
action}/TraceEvent.java | 2 +- .../TransportGetFlamegraphAction.java | 2 +- .../TransportGetStackTracesAction.java | 4 ++- .../TransportGetStatusAction.java | 8 ++++- .../TransportGetTopNFunctionsAction.java | 2 +- .../AbstractProfilingPersistenceManager.java | 2 +- .../{ => persistence}/EventsIndex.java | 2 +- .../{ => persistence}/IndexState.java | 2 +- .../{ => persistence}/IndexStateResolver.java | 6 ++-- .../{ => persistence}/IndexStatus.java | 2 +- .../{ => persistence}/Migration.java | 2 +- .../ProfilingDataStreamManager.java | 11 +++++-- .../ProfilingIndexAbstraction.java | 2 +- .../ProfilingIndexManager.java | 23 +++++++++------ .../ProfilingIndexTemplateRegistry.java | 2 +- .../{ => rest}/RestGetFlamegraphAction.java | 4 ++- .../{ => rest}/RestGetStackTracesAction.java | 4 ++- .../{ => rest}/RestGetStatusAction.java | 3 +- .../RestGetTopNFunctionsAction.java | 4 ++- .../{ => action}/CO2CalculatorTests.java | 2 +- .../{ => action}/CarthesianCombinator.java | 2 +- .../{ => action}/CostCalculatorTests.java | 2 +- .../{ => action}/FrameGroupIDTests.java | 2 +- .../GetStackTracesRequestTests.java | 2 +- .../GetStackTracesResponseTests.java | 2 +- .../{ => action}/HostMetadataTests.java | 2 +- .../{ => action}/IndexAllocationTests.java | 2 +- .../{ => action}/KvIndexResolverTests.java | 2 +- .../{ => action}/NumberUtilsTests.java | 2 +- .../ProfilingInfoTransportActionTests.java | 2 +- .../{ => action}/ResamplerTests.java | 2 +- .../{ => action}/StackFrameTests.java | 2 +- .../{ => action}/StackTraceTests.java | 2 +- .../{ => action}/TopNFunctionTests.java | 2 +- .../TransportGetFlamegraphActionTests.java | 2 +- .../TransportGetStackTracesActionTests.java | 2 +- .../TransportGetTopNFunctionsActionTests.java | 2 +- .../{ => persistence}/EventsIndexTests.java | 2 +- .../ProfilingDataStreamManagerTests.java | 2 +- .../ProfilingIndexManagerTests.java | 2 +- .../ProfilingIndexTemplateRegistryTests.java | 2 +- .../{ => persistence}/VerifyingClient.java | 4 +-- .../RestGetStackTracesActionTests.java | 29 ++++++++----------- 80 files changed, 151 insertions(+), 108 deletions(-) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/CancellationIT.java (99%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlameGraphActionIT.java (96%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesActionIT.java (99%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetStatusActionIT.java (98%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsActionIT.java (98%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/LocalStateProfilingXPackPlugin.java (89%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingTestCase.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CO2Calculator.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CloudProviders.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CostCalculator.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CostEntry.java (95%) 
rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/Frame.java (88%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/FrameGroupID.java (96%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlamegraphAction.java (92%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlamegraphResponse.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesAction.java (92%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesRequest.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponse.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponseBuilder.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStatusAction.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsAction.java (92%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsResponse.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/HostMetadata.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/IndexAllocation.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/InstanceType.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/InstanceTypeService.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/KvIndexResolver.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/NumberUtils.java (95%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingInfoTransportAction.java (96%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingLicenseChecker.java (96%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingUsageTransportAction.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/Resampler.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StackFrame.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StackTrace.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StopWatch.java (94%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TopNFunction.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TraceEvent.java (96%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetFlamegraphAction.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStackTracesAction.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStatusAction.java (94%) rename 
x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetTopNFunctionsAction.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/AbstractProfilingPersistenceManager.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/EventsIndex.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexState.java (95%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexStateResolver.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexStatus.java (92%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/Migration.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingDataStreamManager.java (96%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexAbstraction.java (94%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexManager.java (95%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexTemplateRegistry.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetFlamegraphAction.java (90%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStackTracesAction.java (90%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStatusAction.java (93%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetTopNFunctionsAction.java (90%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CO2CalculatorTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CarthesianCombinator.java (97%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CostCalculatorTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/FrameGroupIDTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesRequestTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponseTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/HostMetadataTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/IndexAllocationTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/KvIndexResolverTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/NumberUtilsTests.java (95%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingInfoTransportActionTests.java (97%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/ResamplerTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/StackFrameTests.java (98%) rename 
x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/StackTraceTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TopNFunctionTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetFlamegraphActionTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStackTracesActionTests.java (98%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetTopNFunctionsActionTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/EventsIndexTests.java (97%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingDataStreamManagerTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexManagerTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexTemplateRegistryTests.java (99%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/VerifyingClient.java (94%) rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStackTracesActionTests.java (87%) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java similarity index 99% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java index ef5198499ff09..183ef3786a62d 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java similarity index 96% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java index 20519d53459ba..49a5cfa7ca067 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; public class GetFlameGraphActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java similarity index 99% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java index 30de2173e8903..9de148c33c467 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java similarity index 98% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java index f3417dbf5d472..27fe2b8acb79b 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java similarity index 98% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java index 05d0e1cb0471b..ab5bbc3812eb5 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java similarity index 89% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java index 3a033e2686b2b..1953007a6c39a 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java @@ -5,10 +5,11 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; import java.nio.file.Path; diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java similarity index 97% rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java index 58b018a13e096..67825f6ce8570 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -25,6 +25,8 @@ import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager; import org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin; import org.elasticsearch.xpack.versionfield.VersionFieldPlugin; import org.junit.After; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index 0615bef7a4980..6962dedb734ae 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -35,6 +35,25 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.GetStatusAction; +import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction; +import org.elasticsearch.xpack.profiling.action.ProfilingInfoTransportAction; +import org.elasticsearch.xpack.profiling.action.ProfilingLicenseChecker; +import org.elasticsearch.xpack.profiling.action.ProfilingUsageTransportAction; +import org.elasticsearch.xpack.profiling.action.TransportGetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.TransportGetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.TransportGetStatusAction; +import org.elasticsearch.xpack.profiling.action.TransportGetTopNFunctionsAction; +import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver; +import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager; +import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry; +import org.elasticsearch.xpack.profiling.rest.RestGetFlamegraphAction; +import org.elasticsearch.xpack.profiling.rest.RestGetStackTracesAction; +import org.elasticsearch.xpack.profiling.rest.RestGetStatusAction; +import org.elasticsearch.xpack.profiling.rest.RestGetTopNFunctionsAction; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java index fcdc116cab725..398a004edd448 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java index 0245df13f8fad..de2feb727a029 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java index 3db9b543bdb88..b8ee54f5f29e8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java index b6795294e7f06..ded99eec428f2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java similarity index 88% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java index b2a37b7cfa903..5bd2d82237fc3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java @@ -5,6 +5,6 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; public record Frame(String fileName, String functionName, int functionOffset, int lineNumber, boolean inline, boolean last) {} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java index 32273d56d0176..4674a2cb0e12f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.Strings; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java index 3719722ad2d62..6866281c8dbeb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java index c851b372cb2db..e4ea3c1521d22 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java index 1fd87740d6292..6871cc9e296f2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java index 038a576cd77fc..be30c9662fddb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.profiling.persistence.EventsIndex; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java index 4cad1104f783b..532ad374c3c4b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java index 44c9c987fc6c7..1b31642d07be1 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.time.Instant; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java index 59132d45995e3..0d8f3aad27daa 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java index b11e74cbbf93d..5d7dc17cd348e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java index b8785bc607b18..b16ce6f43685f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java index aae6615114f43..29f3b66956d55 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java index 7d1c5bdbf66a3..8b97f1139d6ad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java index d694ffd2cbebc..5628b64ea67b7 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java index 3a1cad38f7781..05367cc3fbaaf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java index 53962c1f93cee..dbc60aa47a235 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java index d346dd279f250..f8093091f56c5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; final class NumberUtils { private NumberUtils() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java index 115b165f3e791..1a6809774f7f6 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.inject.Inject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java index 1100c6b10c5f7..a479dca379c4a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseUtils; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java index 7e7b431759cd4..738a7a4e52ddb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java index b70807e472536..54401ce1d3a5a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.Random; import java.util.random.RandomGenerator; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java index 5f7102c63d3d7..b3b2b0b8caea5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java index d24127824dafd..2a4e5f42fe657 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java similarity index 94% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java index c423fe12f3581..6197a0d6a0c4f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; /** * Measures time and logs it in milliseconds. diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java index 777d8e247335c..402d2ff012839 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java index adb88848a418e..f020ad9e6a905 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.util.HashMap; import java.util.Map; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java index 7a25319d3a1cc..4f3778081563b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index d7c9e61b73a3a..5467f0c10ccc8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -48,6 +48,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xpack.profiling.ProfilingPlugin; +import org.elasticsearch.xpack.profiling.persistence.EventsIndex; import java.time.Duration; import java.time.Instant; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java similarity index 94% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index d918a0def7ebb..88f19a62bbedf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. 
 */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -34,6 +34,12 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.profiling.ProfilingPlugin;
+import org.elasticsearch.xpack.profiling.persistence.EventsIndex;
+import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry;
 public class TransportGetStatusAction extends TransportMasterNodeAction<GetStatusAction.Request, GetStatusAction.Response> {
     private static final Logger log = LogManager.getLogger(TransportGetStatusAction.class);
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
index cb5f2da6c3731..05e2202c7b91c 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
index d74eaa8c5650e..528d6f28a7115 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
@@ -5,7 +5,7 @@
  * 2.0.
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java similarity index 98% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java index f246a34f3362d..b87f3345579aa 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java similarity index 95% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java index c34858acf5986..81262e6d33cad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.index.Index; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java similarity index 97% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java index a09d162c32967..b5efe66423679 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -21,12 +21,12 @@ import java.util.List; import java.util.Map; -class IndexStateResolver { +public class IndexStateResolver { private static final Logger logger = LogManager.getLogger(IndexStateResolver.class); private volatile boolean checkOutdatedIndices; - IndexStateResolver(boolean checkOutdatedIndices) { + public IndexStateResolver(boolean checkOutdatedIndices) { this.checkOutdatedIndices = checkOutdatedIndices; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java similarity index 92% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java index 389c0de80cc5f..0dc3da7bc7f80 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; enum IndexStatus { CLOSED(false), diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java index b6ccc2cee91c9..138c2301fd636 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java similarity index 96% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java index 722a7d1dbac63..331d93b066da5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; @@ -36,7 +36,7 @@ /** * Creates all data streams that are required for using Elastic Universal Profiling. 
*/ -class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager { +public class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager { public static final List PROFILING_DATASTREAMS; static { @@ -51,7 +51,12 @@ class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager { +public class ProfilingIndexManager extends AbstractProfilingPersistenceManager { // For testing public static final List PROFILING_INDICES = List.of( ProfilingIndex.regular( @@ -68,7 +68,12 @@ class ProfilingIndexManager extends AbstractProfilingPersistenceManager client.admin().indices().delete(req, l)); } - enum OnVersionBump { + public enum OnVersionBump { DELETE_OLD, KEEP_OLD } @@ -257,27 +262,27 @@ enum OnVersionBump { /** * An index that is used by Universal Profiling. */ - static class ProfilingIndex implements ProfilingIndexAbstraction { + public static class ProfilingIndex implements ProfilingIndexAbstraction { private final String namePrefix; private final int version; private final String generation; private final OnVersionBump onVersionBump; private final List migrations; - public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) { + static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) { return regular(name, version, onVersionBump, null); } - public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) { + static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) { List migrations = builder != null ? builder.build(version) : null; return new ProfilingIndex(name, version, null, onVersionBump, migrations); } - public static ProfilingIndex kv(String name, int version) { + static ProfilingIndex kv(String name, int version) { return kv(name, version, null); } - public static ProfilingIndex kv(String name, int version, Migration.Builder builder) { + static ProfilingIndex kv(String name, int version, Migration.Builder builder) { List migrations = builder != null ? builder.build(version) : null; // K/V indices will age automatically as per the ILM policy, and we won't force-upgrade them on version bumps return new ProfilingIndex(name, version, "000001", OnVersionBump.KEEP_OLD, migrations); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java similarity index 99% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index e1698e71afab2..61d3010bddf77 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java index 3b1b2e1789ad1..c6c9309077a34 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java index ac7e9943b6566..4161f478bc2f3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStackTracesAction; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java similarity index 93% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java index 331bab40cdacc..2d5cc7a71669c 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStatusAction; import java.util.List; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java similarity index 90% rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java index b9896418d7b79..9c23d31964b5b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java index 48cc535dbe7e4..a7b9a97b71acc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java index 2982df317a38c..1b41f30c3df8e 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.lang.reflect.Array; import java.util.function.Consumer; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java index b6e743a0946dd..eaf6cf618eddb 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java index 50cfdd28a98fc..2bd6d66f82c54 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java index cfaa90b8adf85..70bb1abfc40ac 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java index 3ebd2ef6a8aeb..973f9ce3df820 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java index 5c24e295909bc..b6b1ecef666c9 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java index bd66645243a92..756636ef84f78 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java index d6b9438611114..5229a398b0367 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java similarity index 95% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java index 0b8a410f9bb66..649759ba0309d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java index b66b8a3db50f9..d7eda19e45fbf 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java index 0b37dcd154ca5..c2537edab6bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java index 3e1bc4eba202d..0888133759f45 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java index 4f583b55f18f7..ee85c4b9cb01f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java index f30fd18443550..9623415b41554 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java index e10892f0e73ce..46d8df0a91bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java index 2eccfb45f5958..80962ac5064a5 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java index f248d8e27bd43..6e5ed79579a0f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java index 4f943cbb62a7e..8de7c1c974785 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java index 87b8aed1811e2..f2245baafe0c0 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java index ae1aa7072510d..db3037e09763d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java index fb1051add3f1b..81d6ed15804b6 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java similarity index 94% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java index c37404c9209df..38a0c2fdf7e10 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -19,7 +19,7 @@ /** * A client that delegates to a verifying function for action/request/listener */ -public class VerifyingClient extends NoOpClient { +class VerifyingClient extends NoOpClient { private TriFunction, ActionRequest, ActionListener, ActionResponse> verifier = (a, r, l) -> { Assert.fail("verifier not set"); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java similarity index 87% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java index 695bd3be0ef79..d5cd50e65c019 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; @@ -17,6 +17,8 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetStackTracesResponse; import org.junit.Before; import java.util.Collections; @@ -76,22 +78,15 @@ public void testPrepareParameterizedRequest() { assertThat(getStackTracesRequest.getCustomCostPerCoreHour(), is(0.083d)); assertThat(getStackTracesRequest.getQuery(), notNullValue(QueryBuilder.class)); executeCalled.set(true); - - GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(getStackTracesRequest); - responseBuilder.setSamplingRate(0.04d); - responseBuilder.setTotalFrames(523); - responseBuilder.setTotalSamples(3L); - - GetStackTracesResponse response = responseBuilder.build(); - assertNull(response.getStackTraces()); - assertNull(response.getStackFrames()); - assertNull(response.getExecutables()); - assertNull(response.getStackTraceEvents()); - assertEquals(response.getSamplingRate(), 0.04d, 0.0001d); - assertEquals(response.getTotalFrames(), 523); - assertEquals(response.getTotalSamples(), 3L); - - return response; + return new GetStackTracesResponse( + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + 523, + 0.04d, + 3L + ); }); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath("/_profiling/stacktraces") From ec2a4ca8b355116639d86028bc001b3a5394813c Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Apr 2024 06:52:14 +0100 Subject: [PATCH 209/264] Expand release note for #105044 (#107257) Users of supposedly-S3-compatible storage may need to be aware of this change, so this commit expands the release notes to link to the relevant S3 documentation. 
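
For context on the mechanism, since the linked S3 docs describe the log format rather than the client side: the underlying change tags each S3 request with the purpose of the operation as an extra query-string parameter, and S3 records the full query string in the Request-URI field of its server access logs. A minimal sketch of the idea using the AWS SDK for Java v1 follows; it is an illustration only, not Elasticsearch's actual code, and the parameter name "x-purpose" is a made-up placeholder.

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.model.GetObjectRequest;
    import com.amazonaws.services.s3.model.S3Object;

    final class PurposeTaggedRead {
        private PurposeTaggedRead() {}

        // Fetches an object while tagging the request with the purpose of the
        // operation. The parameter is appended to the request's query string,
        // so it appears verbatim in the Request-URI field of the bucket's
        // server access logs and can be used to attribute traffic.
        static S3Object readWithPurpose(AmazonS3 client, String bucket, String key, String purpose) {
            GetObjectRequest request = new GetObjectRequest(bucket, key);
            // "x-purpose" is a placeholder name, not the parameter Elasticsearch uses.
            request.putCustomQueryParameter("x-purpose", purpose);
            return client.getObject(request);
        }
    }

A query-string parameter is used rather than a request header because S3 access logs record the request URI but not arbitrary headers, which is what makes the purpose visible to anyone analysing the logs.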
--- docs/reference/release-notes/8.13.0.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 99ee4e5fb86e1..bcb533049f27d 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -389,7 +389,7 @@ Security:: Snapshot/Restore:: * Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] -* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Expose `OperationPurpose` in S3 access logs using a https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html#LogFormatCustom[custom query-string parameter] {es-pull}105044[#105044] * Fix blob cache race, decay, time dependency {es-pull}104784[#104784] * Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] * Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] From de171b8f88bc1084a00df54f4e9d4fc37a2d41c1 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 10 Apr 2024 08:35:09 +0200 Subject: [PATCH 210/264] Use merge sort instead of hashing to avoid performance issues with many buckets (#107218) --- .../histogram/InternalAutoDateHistogram.java | 162 +++++++++--------- .../org/elasticsearch/TransportVersions.java | 1 + .../bucket/composite/InternalComposite.java | 154 +++++++---------- .../histogram/InternalDateHistogram.java | 99 +++++++++-- .../bucket/histogram/InternalHistogram.java | 100 ++++++++--- .../InternalVariableWidthHistogram.java | 126 ++++++++------ .../bucket/prefix/InternalIpPrefix.java | 95 ++++++---- 7 files changed, 434 insertions(+), 303 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index ab531b69be947..68e4dcf0d2d99 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -7,13 +7,12 @@ */ package org.elasticsearch.aggregations.bucket.histogram; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersions; import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.LongObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,6 +21,7 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -35,6 +35,7 @@ import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
@@ -232,6 +233,11 @@ public InternalAutoDateHistogram(StreamInput in) throws IOException {
         } else {
             bucketInnerInterval = 1; // Calculated on merge.
         }
+        // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort
+        if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) {
+            // list is mutable by #readCollectionAsList contract
+            buckets.sort(Comparator.comparingLong(b -> b.key));
+        }
     }
     @Override
@@ -287,6 +293,61 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
         return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations);
     }
+    /**
+     * This method works almost exactly the same as
+     * InternalDateHistogram#reduceBuckets(List, ReduceContext), the difference
+     * here is that we need to round all the keys we see using the highest level
+     * rounding returned across all the shards so the resolution of the buckets
+     * is the same and they can be reduced together.
+     */
+    private BucketReduceResult reduceBuckets(
+        PriorityQueue<IteratorAndCurrent<Bucket>> pq,
+        int reduceRoundingIdx,
+        long min,
+        long max,
+        AggregationReduceContext reduceContext
+    ) {
+        // First we need to find the highest level rounding used across all the
+        // shards
+        Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max);
+
+        List<Bucket> reducedBuckets = new ArrayList<>();
+        if (pq.size() > 0) {
+            // list of buckets coming from different shards that have the same key
+            List<Bucket> currentBuckets = new ArrayList<>();
+            long key = reduceRounding.round(pq.top().current().key);
+
+            do {
+                final IteratorAndCurrent<Bucket> top = pq.top();
+
+                if (reduceRounding.round(top.current().key) != key) {
+                    // the key changes, reduce what we already buffered and reset the buffer for current buckets
+                    final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                    reducedBuckets.add(reduced);
+                    currentBuckets.clear();
+                    key = reduceRounding.round(top.current().key);
+                }
+
+                currentBuckets.add(top.current());
+
+                if (top.hasNext()) {
+                    top.next();
+                    assert top.current().key > key : "shards must return data sorted by key";
+                    pq.updateTop();
+                } else {
+                    pq.pop();
+                }
+            } while (pq.size() > 0);
+
+            if (currentBuckets.isEmpty() == false) {
+                final Bucket reduced = reduceBucket(currentBuckets, reduceContext);
+                reducedBuckets.add(reduced);
+            }
+        }
+
+        return mergeBucketsIfNeeded(new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), reduceContext);
+    }
     private BucketReduceResult mergeBucketsIfNeeded(BucketReduceResult firstPassResult, AggregationReduceContext reduceContext) {
         int idx = firstPassResult.roundingIdx;
         RoundingInfo info = bucketInfo.roundingInfos[idx];
@@ -338,13 +399,12 @@ private List<Bucket> mergeBuckets(
     private Bucket reduceBucket(List<Bucket> buckets, AggregationReduceContext context) {
         assert buckets.isEmpty() == false;
-        long docCount = 0;
-        for (Bucket bucket : buckets) {
-            docCount += bucket.docCount;
+        try (BucketReducer<Bucket> reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) {
+            for (Bucket bucket : buckets) {
+                reducer.accept(bucket);
+            }
+            return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations());
         }
-        final List<InternalAggregations> aggregations = new BucketAggregationList<>(buckets);
-        final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context);
-        return new
InternalAutoDateHistogram.Bucket(buckets.get(0).key, docCount, format, aggs); } private record BucketReduceResult( @@ -434,87 +494,33 @@ static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, Rou return currentRoundingIdx - 1; } - /** - * This method works almost exactly the same as - * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different - * here is that we need to round all the keys we see using the highest level - * rounding returned across all the shards so the resolution of the buckets - * is the same and they can be reduced together. - */ @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); - int reduceRoundingIdx = 0; - long min = Long.MAX_VALUE; - long max = Long.MIN_VALUE; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; + } + }; + private int reduceRoundingIdx = 0; + private long min = Long.MAX_VALUE; + private long max = Long.MIN_VALUE; @Override public void accept(InternalAggregation aggregation) { - final InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; + InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; reduceRoundingIdx = Math.max(histogram.bucketInfo.roundingIdx, reduceRoundingIdx); - if (false == histogram.buckets.isEmpty()) { + if (histogram.buckets.isEmpty() == false) { min = Math.min(min, histogram.buckets.get(0).key); max = Math.max(max, histogram.buckets.get(histogram.buckets.size() - 1).key); - for (Bucket bucket : histogram.buckets) { - BucketReducer reducer = bucketsReducer.get(bucket.key); - if (reducer == null) { - reducer = new BucketReducer<>(bucket, reduceContext, size); - bucketsReducer.put(bucket.key, reducer); - } - reducer.accept(bucket); - } + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - // First we need to find the highest level rounding used across all the - // shards - final Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max); - - final long[] keys = new long[(int) bucketsReducer.size()]; - { - // fill the array and sort it - final int[] index = new int[] { 0 }; - bucketsReducer.forEach(c -> keys[index[0]++] = c.key); - Arrays.sort(keys); - } - - final List reducedBuckets = new ArrayList<>(); - if (keys.length > 0) { - // list of buckets coming from different shards that have the same key - BucketReducer currentReducer = null; - long key = reduceRounding.round(keys[0]); - for (long top : keys) { - if (reduceRounding.round(top) != key) { - assert currentReducer != null; - // the key changes, reduce what we already buffered and reset the buffer for current buckets - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); - currentReducer = null; - key = reduceRounding.round(top); - } - - final BucketReducer nextReducer = bucketsReducer.get(top); - if (currentReducer == null) { - currentReducer = nextReducer; - } else { - currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.getAggregations())); - } - } - - if (currentReducer != null) { - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), 
currentReducer.getAggregations())); - } - } - - BucketReduceResult reducedBucketsResult = mergeBucketsIfNeeded( - new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), - reduceContext - ); + BucketReduceResult reducedBucketsResult = reduceBuckets(pq, reduceRoundingIdx, min, max, reduceContext); if (reduceContext.isFinalReduce()) { // adding empty buckets if needed @@ -543,12 +549,6 @@ public InternalAggregation get() { reducedBucketsResult.innerInterval ); } - - @Override - public void close() { - bucketsReducer.forEach(c -> Releasables.close(c.value)); - Releasables.close(bucketsReducer); - } }; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index e05487c9c88fe..8589e183a150e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -165,6 +165,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); + public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 22c967bb2ea14..1263532117ac0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -9,12 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ObjectArrayPriorityQueue; -import org.elasticsearch.common.util.ObjectObjectPagedHashMap; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,7 +19,8 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.DelayedBucketReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -204,36 +202,63 @@ int[] getReverseMuls() { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final BucketsQueue queue = new BucketsQueue(reduceContext); - boolean earlyTerminated = false; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return 
a.current().compareKey(b.current()) < 0; + } + }; + private boolean earlyTerminated = false; @Override public void accept(InternalAggregation aggregation) { - InternalComposite sortedAgg = (InternalComposite) aggregation; + final InternalComposite sortedAgg = (InternalComposite) aggregation; earlyTerminated |= sortedAgg.earlyTerminated; - for (InternalBucket bucket : sortedAgg.getBuckets()) { - if (queue.add(bucket) == false) { - // if the bucket is not competitive, we can break - // because incoming buckets are sorted - break; - } + if (sortedAgg.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(sortedAgg.buckets.iterator())); } } @Override public InternalAggregation get() { - final List result = queue.get(); + InternalBucket lastBucket = null; + final List buckets = new ArrayList<>(); + final List result = new ArrayList<>(); + while (pq.size() > 0) { + IteratorAndCurrent top = pq.top(); + if (lastBucket != null && top.current().compareKey(lastBucket) != 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + buckets.clear(); + result.add(reduceBucket); + if (result.size() >= getSize()) { + break; + } + } + lastBucket = top.current(); + buckets.add(top.current()); + if (top.hasNext()) { + top.next(); + pq.updateTop(); + } else { + pq.pop(); + } + } + if (buckets.size() > 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + result.add(reduceBucket); + } + List reducedFormats = formats; CompositeKey lastKey = null; - if (result.isEmpty() == false) { - InternalBucket lastBucket = result.get(result.size() - 1); + if (result.size() > 0) { + lastBucket = result.get(result.size() - 1); /* Attach the formats from the last bucket to the reduced composite * so that we can properly format the after key. */ reducedFormats = lastBucket.formats; lastKey = lastBucket.getRawKey(); } reduceContext.consumeBucketsAndMaybeBreak(result.size()); - InternalComposite reduced = new InternalComposite( + final InternalComposite reduced = new InternalComposite( name, getSize(), sourceNames, @@ -248,85 +273,9 @@ public InternalAggregation get() { reduced.validateAfterKey(); return reduced; } - - @Override - public void close() { - Releasables.close(queue); - } }; } - private class BucketsQueue implements Releasable { - private final ObjectObjectPagedHashMap> bucketReducers; - private final ObjectArrayPriorityQueue queue; - private final AggregationReduceContext reduceContext; - - private BucketsQueue(AggregationReduceContext reduceContext) { - this.reduceContext = reduceContext; - bucketReducers = new ObjectObjectPagedHashMap<>(getSize(), reduceContext.bigArrays()); - queue = new ObjectArrayPriorityQueue<>(getSize(), reduceContext.bigArrays()) { - @Override - protected boolean lessThan(InternalBucket a, InternalBucket b) { - return b.compareKey(a) < 0; - } - }; - } - - /** adds a bucket to the queue. 
Return false if the bucket is not competitive, otherwise true.*/ - boolean add(InternalBucket bucket) { - DelayedBucketReducer delayed = bucketReducers.get(bucket.key); - if (delayed == null) { - final InternalBucket out = queue.insertWithOverflow(bucket); - if (out == null) { - // bucket is added - delayed = new DelayedBucketReducer<>(bucket, reduceContext); - } else if (out == bucket) { - // bucket is not competitive - return false; - } else { - // bucket replaces existing bucket - delayed = bucketReducers.remove(out.key); - assert delayed != null; - delayed.reset(bucket); - } - bucketReducers.put(bucket.key, delayed); - } - delayed.accept(bucket); - return true; - } - - /** Return the list of reduced buckets */ - List get() { - final int bucketsSize = (int) bucketReducers.size(); - final InternalBucket[] result = new InternalBucket[bucketsSize]; - for (int i = bucketsSize - 1; i >= 0; i--) { - final InternalBucket bucket = queue.pop(); - assert bucket != null; - /* Use the formats from the bucket because they'll be right to format - * the key. The formats on the InternalComposite doing the reducing are - * just whatever formats make sense for *its* index. This can be real - * trouble when the index doing the reducing is unmapped. */ - final var reducedFormats = bucket.formats; - final DelayedBucketReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); - result[i] = new InternalBucket( - sourceNames, - reducedFormats, - bucket.key, - reverseMuls, - missingOrders, - reducer.getDocCount(), - reducer.getAggregations() - ); - } - return List.of(result); - } - - @Override - public void close() { - Releasables.close(bucketReducers, queue); - } - } - @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalComposite( @@ -343,6 +292,23 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } + private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (InternalBucket bucket : buckets) { + reducer.accept(bucket); + } + /* Use the formats from the bucket because they'll be right to format + * the key. The formats on the InternalComposite doing the reducing are + * just whatever formats make sense for *its* index. This can be real + * trouble when the index doing the reducing is unmapped. 
*/ + final var reducedFormats = reducer.getProto().formats; + final long docCount = reducer.getDocCount(); + final InternalAggregations aggs = reducer.getAggregations(); + return new InternalBucket(sourceNames, reducedFormats, reducer.getProto().key, reverseMuls, missingOrders, docCount, aggs); + } + } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index ce42145f2ceb1..4939c3bc88744 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -23,6 +23,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -262,6 +264,11 @@ public InternalDateHistogram(StreamInput in) throws IOException { downsampledResultsOffset = false; } buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingLong(b -> b.key)); + } } @Override @@ -323,6 +330,71 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (top.current().key != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + if 
(consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + reducedBuckets.add(reduced); + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key > key : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + /** + * Reduce a list of same-keyed buckets (from multiple shards) to a single bucket. This + * requires all buckets to have the same key. + */ + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private void addEmptyBuckets(List list, AggregationReduceContext reduceContext) { /* * Make sure we have space for the empty buckets we're going to add by @@ -433,31 +505,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, L @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + private final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalDateHistogram.this.createBucket(key, docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; } }; @Override public void accept(InternalAggregation aggregation) { - InternalDateHistogram dateHistogram = (InternalDateHistogram) aggregation; - for (Bucket bucket : dateHistogram.buckets) { - reducer.accept(bucket.key, bucket); + final InternalDateHistogram histogram = (InternalDateHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingLong(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -486,11 +552,6 @@ public InternalAggregation get() { getMetadata() ); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 980f11ab0aa61..4ff01c5648486 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,6 +21,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -177,6 +179,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(minBound); out.writeDouble(maxBound); subAggregations.writeTo(out); + } @Override @@ -240,6 +243,11 @@ public InternalHistogram(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingDouble(b -> b.key)); + } } @Override @@ -282,6 +290,69 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (Double.compare(top.current().key, key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + // Using Double.compare instead of != to handle NaN correctly. 
+ final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert Double.compare(top.current().key, key) > 0 : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private double nextKey(double key) { return round(key + emptyBucketInfo.interval + emptyBucketInfo.interval / 2); } @@ -376,31 +447,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, D @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalHistogram.this.createBucket(NumericUtils.sortableLongToDouble(key), docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return Double.compare(a.current().key, b.current().key) < 0; } }; @Override public void accept(InternalAggregation aggregation) { - InternalHistogram histogram = (InternalHistogram) aggregation; - for (Bucket bucket : histogram.buckets) { - reducer.accept(NumericUtils.doubleToSortableLong(bucket.key), bucket); + final InternalHistogram histogram = (InternalHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingDouble(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -418,11 +483,6 @@ public InternalAggregation get() { } return new InternalHistogram(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 27a79095eb49d..05944b75d06d5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -8,11 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.LongObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,6 +20,7 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -260,6 +260,11 @@ public InternalVariableWidthHistogram(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); buckets = in.readCollectionAsList(stream -> new Bucket(stream, format)); targetNumBuckets = in.readVInt(); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingDouble(b -> b.centroid)); + } } @Override @@ -309,21 +314,62 @@ public Number getKey(MultiBucketsAggregation.Bucket bucket) { } private Bucket reduceBucket(List buckets, AggregationReduceContext context) { - long docCount = 0; + assert buckets.isEmpty() == false; double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; double sum = 0; - for (InternalVariableWidthHistogram.Bucket bucket : buckets) { - docCount += bucket.docCount; - min = Math.min(min, bucket.bounds.min); - max = Math.max(max, bucket.bounds.max); - sum += bucket.docCount * bucket.centroid; - } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - final double centroid = sum / docCount; - final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); - return new Bucket(centroid, bounds, docCount, format, aggs); + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + min = Math.min(min, bucket.bounds.min); + max = Math.max(max, bucket.bounds.max); + sum += bucket.docCount * bucket.centroid; + reducer.accept(bucket); + } + final double centroid = sum / reducer.getDocCount(); + final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); + return 
new Bucket(centroid, bounds, reducer.getDocCount(), format, reducer.getAggregations()); + } + } + + public List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + double key = pq.top().current().centroid(); + // list of buckets coming from different shards that have the same key + final List currentBuckets = new ArrayList<>(); + do { + IteratorAndCurrent top = pq.top(); + + if (Double.compare(top.current().centroid(), key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = top.current().centroid(); + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + Bucket prev = top.current(); + top.next(); + assert top.current().compareKey(prev) >= 0 : "shards must return data sorted by centroid"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + } + } + + mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); + return reducedBuckets; } static class BucketRange { @@ -479,42 +525,24 @@ private static void adjustBoundsForOverlappingBuckets(List buckets) { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return Double.compare(a.current().centroid, b.current().centroid) < 0; + } + }; @Override public void accept(InternalAggregation aggregation) { - InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation; - for (Bucket bucket : histogram.getBuckets()) { - long key = NumericUtils.doubleToSortableLong(bucket.centroid()); - ReducerAndExtraInfo reducer = bucketsReducer.get(key); - if (reducer == null) { - reducer = new ReducerAndExtraInfo(new BucketReducer<>(bucket, reduceContext, size)); - bucketsReducer.put(key, reducer); - reduceContext.consumeBucketsAndMaybeBreak(1); - } - reducer.min[0] = Math.min(reducer.min[0], bucket.bounds.min); - reducer.max[0] = Math.max(reducer.max[0], bucket.bounds.max); - reducer.sum[0] += bucket.docCount * bucket.centroid; - reducer.reducer.accept(bucket); + final InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.forEach(entry -> { - final double centroid = entry.value.sum[0] / entry.value.reducer.getDocCount(); - final Bucket.BucketBounds bounds = new Bucket.BucketBounds(entry.value.min[0], entry.value.max[0]); - reducedBuckets.add( - new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.getAggregations()) - ); - }); - reducedBuckets.sort(Comparator.comparing(Bucket::centroid)); - 
mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); + final List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { buckets.sort(Comparator.comparing(Bucket::min)); mergeBucketsWithSameMin(reducedBuckets, reduceContext); @@ -522,21 +550,9 @@ public InternalAggregation get() { } return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, format, metadata); } - - @Override - public void close() { - bucketsReducer.forEach(entry -> Releasables.close(entry.value.reducer)); - Releasables.close(bucketsReducer); - } }; } - private record ReducerAndExtraInfo(BucketReducer reducer, double[] min, double[] max, double[] sum) { - private ReducerAndExtraInfo(BucketReducer reducer) { - this(reducer, new double[] { Double.POSITIVE_INFINITY }, new double[] { Double.NEGATIVE_INFINITY }, new double[] { 0 }); - } - } - @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalVariableWidthHistogram( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index 3557947bb9ea7..48b11524df792 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -9,10 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.prefix; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ObjectObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,12 +20,12 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -225,51 +224,69 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key.compareTo(b.current().key) < 0; + } + }; @Override public void accept(InternalAggregation aggregation) { final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation; - for (Bucket bucket : ipPrefix.getBuckets()) { - BucketReducer bucketReducer = buckets.get(bucket.key); - if (bucketReducer == null) { - bucketReducer = new BucketReducer<>(bucket, reduceContext, size); - boolean success = false; - try { - 
buckets.put(bucket.key, bucketReducer); - success = true; - } finally { - if (success == false) { - Releasables.close(bucketReducer); - } - } - } - bucketReducer.accept(bucket); + if (ipPrefix.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(ipPrefix.buckets.iterator())); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size())); - buckets.forEach(entry -> { - if (false == reduceContext.isFinalReduce() || entry.value.getDocCount() >= minDocCount) { - reducedBuckets.add(createBucket(entry.value.getProto(), entry.value.getAggregations(), entry.value.getDocCount())); - } - }); + final List reducedBuckets = reduceBuckets(pq, reduceContext); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); - reducedBuckets.sort(Comparator.comparing(a -> a.key)); return new InternalIpPrefix(getName(), format, keyed, minDocCount, reducedBuckets, metadata); } + }; + } - @Override - public void close() { - buckets.forEach(entry -> Releasables.close(entry.value)); - Releasables.close(buckets); + private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same value + List currentBuckets = new ArrayList<>(); + BytesRef value = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + if (top.current().key.equals(value) == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) { + reducedBuckets.add(reduced); + } + currentBuckets.clear(); + value = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key.compareTo(value) > 0 + : "shards must return data sorted by value [" + top.current().key + "] and [" + value + "]"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) { + reducedBuckets.add(reduced); + } } - }; + } + + return reducedBuckets; } @Override @@ -322,6 +339,16 @@ private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, ); } + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto(), reducer.getAggregations(), reducer.getDocCount()); + } + } + @Override public List getBuckets() { return Collections.unmodifiableList(buckets); From 6507ba572dacb5bcf1ff973418dd51b72a1d0d9a Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 10 Apr 2024 09:04:10 +0200 Subject: [PATCH 211/264] [Profiling] Limit TopN functions to available data (#107296) With this commit we handle the case in the TopN functions API where the specified limit is larger than the available number of TopN functions. Currently this throws an `IndexOutOfBoundsException`. With this check in place we just return the list as is.
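For illustration, the essence of the fix is a bounds check before taking the sub-list. Below is a minimal, self-contained sketch of that clamping logic; `TopNLimitExample` and `limitTopN` are hypothetical names for this sketch, not the actual `TopNFunctionsBuilder` API:

    import java.util.Arrays;
    import java.util.List;

    class TopNLimitExample {
        static <T> List<T> limitTopN(List<T> functions, Integer limit) {
            // Apply the limit only when it is positive and strictly smaller than
            // the number of available functions; an unconditional
            // functions.subList(0, limit) throws IndexOutOfBoundsException as
            // soon as limit exceeds functions.size().
            if (limit != null && limit > 0 && limit < functions.size()) {
                return functions.subList(0, limit);
            }
            return functions; // null, non-positive, or oversized limit: return the list as is
        }

        public static void main(String[] args) {
            List<String> topN = Arrays.asList("foo", "bar");
            System.out.println(limitTopN(topN, 5)); // prints [foo, bar] instead of throwing
            System.out.println(limitTopN(topN, 1)); // prints [foo]
        }
    }

Note that the self and total counts are summed before the limit is applied (see the "limit at the end so global stats are independent of the limit" comment in the change below), so the reported totals do not depend on the requested limit.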
--- .../TransportGetTopNFunctionsAction.java | 4 +- .../action/TopNFunctionsBuilderTests.java | 87 +++++++++++++++++++ 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java index 05e2202c7b91c..e5d67c0b005e2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java @@ -120,7 +120,7 @@ static GetTopNFunctionsResponse buildTopNFunctions(GetStackTracesResponse respon return builder.build(); } - private static class TopNFunctionsBuilder { + static class TopNFunctionsBuilder { private final Integer limit; private final HashMap topNFunctions; @@ -141,7 +141,7 @@ public GetTopNFunctionsResponse build() { sumTotalCount += topNFunction.getTotalCount(); } // limit at the end so global stats are independent of the limit - if (limit != null && limit > 0) { + if (limit != null && limit > 0 && limit < functions.size()) { functions = functions.subList(0, limit); } return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java new file mode 100644 index 0000000000000..26c0f066dd092 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling.action; + +import org.elasticsearch.test.ESTestCase; + +public class TopNFunctionsBuilderTests extends ESTestCase { + public void testBuildFunctions() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(null); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + public void testBuildFunctionsWithLimitSmallerThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(1); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + // total counts are independent of the limit + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(1, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + } + + public void testBuildFunctionsWithLimitHigherThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(5); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + // still limited to the available two functions + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + private TopNFunction foo() { + TopNFunction foo = function("foo"); + foo.addSelfCount(5L); + foo.addTotalCount(10L); + foo.addSelfAnnualCO2Tons(1.0d); + foo.addTotalAnnualCO2Tons(2.0d); + foo.addSelfAnnualCostsUSD(32.2d); + foo.addTotalAnnualCostsUSD(64.4d); + return foo; + } + + private TopNFunction bar() { + TopNFunction bar = function("bar"); + bar.addSelfCount(2L); + bar.addTotalCount(4L); + bar.addSelfAnnualCO2Tons(0.5d); + bar.addTotalAnnualCO2Tons(1.0d); + bar.addSelfAnnualCostsUSD(16.0d); + bar.addTotalAnnualCostsUSD(32.0d); + return bar; + } + + private TopNFunction function(String name) { + return new TopNFunction(name, 3, false, 0, name, "main.c", 1, "demo"); + } +} From e21f2e30fb9c92b8df6c951b8b4afeb6cd4581bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 10 Apr 2024 10:17:59 +0200 Subject: [PATCH 212/264] [Transform] Make force-stopping the transform always remove persistent task from cluster state (#106989) --- docs/changelog/106989.yaml | 7 ++ .../transform/integration/TransformIT.java | 27 +++--- .../integration/TransformRestTestCase.java | 3 - .../integration/TransformRobustnessIT.java | 51 ++++++++++- .../TransformTaskFailedStateIT.java | 9 ++ .../action/TransportStopTransformAction.java | 91 +++++++++---------- .../transforms/TransformNodeAssignments.java | 14 +++ .../TransformNodeAssignmentsTests.java | 52 +++++++++-- 8 files changed, 177 insertions(+), 77 deletions(-) create mode 100644 docs/changelog/106989.yaml diff --git 
a/docs/changelog/106989.yaml b/docs/changelog/106989.yaml new file mode 100644 index 0000000000000..47df5fe5b47d7 --- /dev/null +++ b/docs/changelog/106989.yaml @@ -0,0 +1,7 @@ +pr: 106989 +summary: Make force-stopping the transform always remove persistent task from cluster + state +area: Transform +type: bug +issues: + - 106811 diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index e7d54028caa20..4db0d0d8baaf1 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -241,38 +241,39 @@ public void testTransformLifecycleInALoop() throws Exception { long sleepAfterStartMillis = randomLongBetween(0, 5_000); boolean force = randomBoolean(); try { - // Create the continuous transform + // Create the continuous transform. putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId, RequestOptions.DEFAULT); - // There is 1 transform task after start + // There is 1 transform task after start. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); Thread.sleep(sleepAfterStartMillis); - // There should still be 1 transform task as the transform is continuous + // There should still be 1 transform task as the transform is continuous. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); - // Stop the transform with force set randomly + // Stop the transform with force set randomly. stopTransform(transformId, true, null, false, force); - // After the transform is stopped, there should be no transform task left - assertThat(getTransformTasks(), is(empty())); + if (force) { + // If the "force" has been used, then the persistent task is removed from the cluster state but the local task can still + // be seen by the PersistentTasksNodeService. We need to wait until PersistentTasksNodeService reconciles the state. + assertBusy(() -> assertThat(getTransformTasks(), is(empty()))); + } else { + // If the "force" hasn't been used then we can expect the local task to be already gone. + assertThat(getTransformTasks(), is(empty())); + } + // After the transform is stopped, there should be no transform task left in the cluster state. 
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { throw new AssertionError( - format( - "Failure at iteration %d (sleepAfterStartMillis=%s,force=%s): %s", - i, - sleepAfterStartMillis, - force, - e.getMessage() - ), + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), e ); } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index eb1a1258d5a96..4cc9a31c8eff5 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -65,9 +65,6 @@ public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static final String AUTH_KEY = "Authorization"; - protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; - private final Set createdTransformIds = new HashSet<>(); protected void cleanUp() throws Exception { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 0f807fbae45d1..4b7c42968f557 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; @@ -19,6 +18,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -86,10 +86,10 @@ public void testTaskRemovalAfterInternalIndexGotDeleted() throws Exception { deleteTransform(transformId); } - public void testCreateAndDeleteTransformInALoop() throws IOException { + public void testBatchTransformLifecycleInALoop() throws IOException { createReviewsIndex(); - String transformId = "test_create_and_delete_in_a_loop"; + String transformId = "test_batch_lifecycle_in_a_loop"; String destIndex = transformId + "-dest"; for (int i = 0; i < 100; ++i) { try { @@ -108,7 +108,48 @@ public void testCreateAndDeleteTransformInALoop() throws IOException { // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { - fail("Failure at iteration " + i + ": " + e.getMessage()); + throw new AssertionError(format("Failure at iteration %d: %s", i, e.getMessage()), e); + }
+ } + + public void testContinuousTransformLifecycleInALoop() throws Exception { + createReviewsIndex(); + + String transformId = "test_cont_lifecycle_in_a_loop"; + String destIndex = transformId + "-dest"; + for (int i = 0; i < 100; ++i) { + long sleepAfterStartMillis = randomLongBetween(0, 5_000); + boolean force = randomBoolean(); + try { + // Create the continuous transform. + createContinuousPivotReviewsTransform(transformId, destIndex, null); + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + startTransform(transformId); + // There is 1 transform task after start. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + Thread.sleep(sleepAfterStartMillis); + // There should still be 1 transform task as the transform is continuous. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + // Stop the transform with force set randomly. + stopTransform(transformId, force); + // After the transform is stopped, there should be no transform task left. + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + // Delete the transform. + deleteTransform(transformId); + } catch (AssertionError | Exception e) { + throw new AssertionError( + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), + e + ); } } } @@ -168,7 +209,7 @@ private void beEvilAndDeleteTheTransformIndex() throws IOException { } private static String createConfig(String sourceIndex, String destIndex) { - return Strings.format(""" + return format(""" { "source": { "index": "%s" diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index bccd97f22b4a1..5ab65ca023506 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -65,6 +65,7 @@ public void testForceStopFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -78,6 +79,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot stop a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> stopTransform(transformId, false)); @@ -99,6 +101,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); 
} public void testForceResetFailedTransform() throws Exception { @@ -109,6 +112,7 @@ public void testForceResetFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -122,6 +126,7 @@ public void testForceResetFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot reset a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> resetTransform(transformId, false)); @@ -135,6 +140,7 @@ public void testForceResetFailedTransform() throws Exception { resetTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } public void testStartFailedTransform() throws Exception { @@ -145,6 +151,7 @@ public void testStartFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -158,6 +165,7 @@ public void testStartFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); var expectedFailure = "Unable to start transform [test-force-start-failed-transform] " + "as it is in a failed state. Use force stop and then restart the transform once error is resolved. 
More details: [" @@ -172,6 +180,7 @@ public void testStartFailedTransform() throws Exception { stopTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } private void awaitState(String transformId, TransformStats.State state) throws Exception { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index b8ea1fee6e886..1996012ccdf58 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -164,18 +164,23 @@ protected void doExecute(Task task, Request request, ActionListener li state ); - final ActionListener doExecuteListener; - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); - if (transformNodeAssignments.getExecutorNodes().size() > 0) { + if (request.isForce()) { + // When force==true, we **do not** fan out to individual tasks (i.e. taskOperation method will not be called) as we + // want to make sure that the persistent tasks will be removed from cluster state even if these tasks are no longer + // visible by the PersistentTasksService. + cancelTransformTasksListener(transformNodeAssignments.getAssigned(), doExecuteListener).onResponse( + new Response(true) + ); + } else if (transformNodeAssignments.getExecutorNodes().isEmpty()) { + doExecuteListener.onResponse(new Response(true)); + } else { request.setNodes(transformNodeAssignments.getExecutorNodes().toArray(new String[0])); super.doExecute(task, request, doExecuteListener); - } else { - doExecuteListener.onResponse(new Response(true)); } }, e -> { if (e instanceof ResourceNotFoundException) { @@ -189,13 +194,10 @@ protected void doExecute(Task task, Request request, ActionListener li listener.onFailure(e); // found transforms without a config } else if (request.isForce()) { - final ActionListener doExecuteListener; - - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); if (transformNodeAssignments.getExecutorNodes().size() > 0) { request.setExpandedIds(transformNodeAssignments.getAssigned()); @@ -235,7 +237,6 @@ protected void taskOperation( TransformTask transformTask, ActionListener listener ) { - Set ids = request.getExpandedIds(); if (ids == null) { listener.onFailure(new IllegalStateException("Request does not have expandedIds set")); @@ -243,20 +244,6 @@ protected void taskOperation( } if (ids.contains(transformTask.getTransformId())) { - if (request.isForce()) { - // If force==true, we skip the additional step (setShouldStopAtCheckpoint) and move directly to shutting down the task. 
@@ -235,7 +237,6 @@ protected void taskOperation(
         TransformTask transformTask,
         ActionListener<Response> listener
     ) {
-
         Set<String> ids = request.getExpandedIds();
         if (ids == null) {
             listener.onFailure(new IllegalStateException("Request does not have expandedIds set"));
@@ -243,20 +244,6 @@ protected void taskOperation(
         }
 
         if (ids.contains(transformTask.getTransformId())) {
-            if (request.isForce()) {
-                // If force==true, we skip the additional step (setShouldStopAtCheckpoint) and move directly to shutting down the task.
-                // This way we ensure that the persistent task is removed ASAP (as opposed to being removed in one of the listeners).
-                try {
-                    // Here the task is deregistered in scheduler and marked as completed in persistent task service.
-                    transformTask.shutdown();
-                    // Here the indexer is aborted so that its thread finishes work ASAP.
-                    transformTask.onCancelled();
-                    listener.onResponse(new Response(true));
-                } catch (ElasticsearchException ex) {
-                    listener.onFailure(ex);
-                }
-                return;
-            }
             // move the call to the generic thread pool, so we do not block the network thread
             threadPool.generic().execute(() -> {
                 transformTask.setShouldStopAtCheckpoint(request.isWaitForCheckpoint(), ActionListener.wrap(r -> {
@@ -306,7 +293,6 @@ protected StopTransformAction.Response newResponse(
     }
 
     private ActionListener<Response> waitForStopListener(Request request, ActionListener<Response> listener) {
-
         ActionListener<Response> onStopListener = ActionListener.wrap(
             waitResponse -> transformConfigManager.refresh(ActionListener.wrap(r -> listener.onResponse(waitResponse), e -> {
                 if ((ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) == false) {
@@ -393,6 +379,7 @@ private void waitForTransformStopped(
     ) {
         // This map is accessed in the predicate and the listener callbacks
         final Map<String, ElasticsearchException> exceptions = new ConcurrentHashMap<>();
+
         persistentTasksService.waitForPersistentTasksCondition(persistentTasksCustomMetadata -> {
             if (persistentTasksCustomMetadata == null) {
                 return true;
@@ -501,34 +488,38 @@ private void waitForTransformStopped(
         }));
     }
 
-    private ActionListener<Response> cancelTransformTasksWithNoAssignment(
-        final ActionListener<Response> finalListener,
-        final TransformNodeAssignments transformNodeAssignments
+    /**
+     * Creates and returns the listener that sends remove request for every task in the given set.
+     *
+     * @param transformTasks set of transform tasks that should be removed
+     * @param finalListener listener that should be called once all the given tasks are removed
+     * @return listener that removes given tasks in parallel
+     */
+    private ActionListener<Response> cancelTransformTasksListener(
+        final Set<String> transformTasks,
+        final ActionListener<Response> finalListener
     ) {
-        final ActionListener<Response> doExecuteListener = ActionListener.wrap(response -> {
+        if (transformTasks.isEmpty()) {
+            return finalListener;
+        }
+        return ActionListener.wrap(response -> {
             GroupedActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> groupedListener = new GroupedActionListener<>(
-                transformNodeAssignments.getWaitingForAssignment().size(),
-                ActionListener.wrap(r -> {
-                    finalListener.onResponse(response);
-                }, finalListener::onFailure)
+                transformTasks.size(),
+                ActionListener.wrap(r -> finalListener.onResponse(response), finalListener::onFailure)
            );
-            for (String unassignedTaskId : transformNodeAssignments.getWaitingForAssignment()) {
-                persistentTasksService.sendRemoveRequest(unassignedTaskId, null, groupedListener);
+            for (String taskId : transformTasks) {
+                persistentTasksService.sendRemoveRequest(taskId, null, groupedListener);
             }
-
         }, e -> {
             GroupedActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> groupedListener = new GroupedActionListener<>(
-                transformNodeAssignments.getWaitingForAssignment().size(),
-                ActionListener.wrap(r -> {
-                    finalListener.onFailure(e);
-                }, finalListener::onFailure)
+                transformTasks.size(),
+                ActionListener.wrap(r -> finalListener.onFailure(e), finalListener::onFailure)
            );
-            for (String unassignedTaskId : transformNodeAssignments.getWaitingForAssignment()) {
-                persistentTasksService.sendRemoveRequest(unassignedTaskId, null, groupedListener);
+            for (String taskId : transformTasks) {
+                persistentTasksService.sendRemoveRequest(taskId, null, groupedListener);
             }
         });
-        return doExecuteListener;
     }
 }
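The replacement of cancelTransformTasksWithNoAssignment with cancelTransformTasksListener above boils down to a grouped fan-out: one remove request per task id, with the caller's listener completed only after every member of the group has reported back, and an empty set short-circuiting to the original listener. A minimal sketch of that pattern, where RemoveCall is a hypothetical stand-in for PersistentTasksService.sendRemoveRequest:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.GroupedActionListener;

    import java.util.Collection;
    import java.util.Set;

    class GroupedRemovalSketch {
        // Hypothetical stand-in for PersistentTasksService.sendRemoveRequest
        interface RemoveCall {
            void remove(String taskId, ActionListener<Void> listener);
        }

        static void removeAll(Set<String> taskIds, RemoveCall service, ActionListener<Void> done) {
            if (taskIds.isEmpty()) {
                done.onResponse(null); // mirror the short-circuit: nothing to remove
                return;
            }
            // The group completes once it has collected taskIds.size() responses or a failure.
            GroupedActionListener<Void> grouped = new GroupedActionListener<>(
                taskIds.size(),
                ActionListener.wrap((Collection<Void> ignored) -> done.onResponse(null), done::onFailure)
            );
            for (String taskId : taskIds) {
                service.remove(taskId, grouped); // each callback counts toward the group
            }
        }
    }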
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
index 7b61f0c9e8335..46f893a90aba1 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java
@@ -65,4 +65,18 @@ public Set<String> getWaitingForAssignment() {
     public Set<String> getStopped() {
         return stopped;
     }
+
+    @Override
+    public String toString() {
+        return new StringBuilder("TransformNodeAssignments[").append("executorNodes=")
+            .append(executorNodes)
+            .append(",assigned=")
+            .append(assigned)
+            .append(",waitingForAssignment=")
+            .append(waitingForAssignment)
+            .append(",stopped=")
+            .append(stopped)
+            .append("]")
+            .toString();
+    }
 }
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
index f5c0b6046fbfe..2643d1bba652d 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java
@@ -9,8 +9,6 @@
 
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.Arrays;
-import java.util.HashSet;
 import java.util.Set;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -19,10 +17,11 @@ public class TransformNodeAssignmentsTests extends ESTestCase {
 
     public void testConstructorAndGetters() {
-        Set<String> executorNodes = new HashSet<>(Arrays.asList("executor-1", "executor-2"));
-        Set<String> assigned = new HashSet<>(Arrays.asList("assigned-1", "assigned-2"));
-        Set<String> waitingForAssignment = new HashSet<>(Arrays.asList("waiting-1", "waitingv-2"));
-        Set<String> stopped = new HashSet<>(Arrays.asList("stopped-1", "stopped-2"));
+        Set<String> executorNodes = Set.of("executor-1", "executor-2");
+        Set<String> assigned = Set.of("assigned-1", "assigned-2");
+        Set<String> waitingForAssignment = Set.of("waiting-1", "waiting-2");
+        Set<String> stopped = Set.of("stopped-1", "stopped-2");
+
         TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
 
         assertThat(assignments.getExecutorNodes(), is(equalTo(executorNodes)));
@@ -30,4 +29,45 @@ public void testConstructorAndGetters() {
         assertThat(assignments.getWaitingForAssignment(), is(equalTo(waitingForAssignment)));
         assertThat(assignments.getStopped(), is(equalTo(stopped)));
     }
+
+    public void testToString() {
+        Set<String> executorNodes = Set.of("executor-1");
+        Set<String> assigned = Set.of("assigned-1");
+        Set<String> waitingForAssignment = Set.of("waiting-1");
+        Set<String> stopped = Set.of("stopped-1");
+
+        TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
+
+        assertThat(
+            assignments.toString(),
+            is(
+                equalTo(
+                    "TransformNodeAssignments["
+                        + "executorNodes=[executor-1],"
+                        + "assigned=[assigned-1],"
+                        + "waitingForAssignment=[waiting-1],"
+                        + "stopped=[stopped-1]"
+                        + "]"
+                )
+            )
+        );
+    }
+
+    public void testToString_EmptyCollections() {
+        Set<String> executorNodes = Set.of();
+        Set<String> assigned = Set.of();
+        Set<String> waitingForAssignment = Set.of();
+        Set<String> stopped = Set.of();
+
+        TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped);
+
+        assertThat(
+            assignments.toString(),
+            is(
+                equalTo(
+                    "TransformNodeAssignments[" + "executorNodes=[]," + "assigned=[]," + "waitingForAssignment=[]," + "stopped=[]" + "]"
+                )
+            )
+        );
+    }
 }

From c4a11de0046aa8b4b13bd1081e6120ff574d02ee Mon Sep 17 00:00:00 2001
From: Nikolaj Volgushev
Date: Wed, 10 Apr 2024 11:28:01 +0200
Subject: [PATCH 213/264] Make API key actions local-only (#107148)

Refactoring PR to make create, grant, and update API key actions
local-only. Also ports a profiles action since it relies on the same base
class as grant API key.

---
 .../core/security/action/GrantRequest.java | 16 +-
 .../apikey/AbstractCreateApiKeyRequest.java | 16 +-
 .../apikey/BaseBulkUpdateApiKeyRequest.java | 14 --
 .../apikey/BaseSingleUpdateApiKeyRequest.java | 14 --
 .../apikey/BaseUpdateApiKeyRequest.java | 23 +--
 .../apikey/BulkUpdateApiKeyRequest.java | 6 -
 .../action/apikey/CreateApiKeyRequest.java | 51 -------
 .../CreateCrossClusterApiKeyRequest.java | 28 ----
 .../action/apikey/GrantApiKeyRequest.java | 14 --
 .../action/apikey/UpdateApiKeyRequest.java | 6 -
 .../UpdateCrossClusterApiKeyRequest.java | 6 -
 .../profile/ActivateProfileRequest.java | 13 --
 ...UpdateApiKeyRequestSerializationTests.java | 71 ---------
 .../apikey/CreateApiKeyRequestTests.java | 61 --------
 .../CreateCrossClusterApiKeyRequestTests.java | 137 ------------------
 ...UpdateApiKeyRequestSerializationTests.java | 72 ---------
 .../UpdateCrossClusterApiKeyRequestTests.java | 34 -----
 .../xpack/security/apikey/ApiKeyRestIT.java | 2 +-
 .../security/action/TransportGrantAction.java | 9 +-
 .../TransportBaseUpdateApiKeyAction.java | 9 +-
 .../TransportBulkUpdateApiKeyAction.java | 2 +-
 .../apikey/TransportCreateApiKeyAction.java | 7 +-
 ...ansportCreateCrossClusterApiKeyAction.java | 15 +-
 .../apikey/TransportGrantApiKeyAction.java | 10 +-
 .../apikey/TransportUpdateApiKeyAction.java | 2 +-
 ...ansportUpdateCrossClusterApiKeyAction.java | 2 +-
 .../TransportActivateProfileAction.java | 1 -
 ...rtCreateCrossClusterApiKeyActionTests.java | 2 +-
 ...rtUpdateCrossClusterApiKeyActionTests.java | 2 +-
 .../security/authc/ApiKeyServiceTests.java | 28 +++-
 ...stUpdateCrossClusterApiKeyActionTests.java | 2 +-
 31 files changed, 62 insertions(+), 613 deletions(-)
 delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java
 delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java
 delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java
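This commit applies one mechanical pattern to every listed request class, so it is worth seeing once in isolation. A hedged sketch of what a local-only request ends up looking like (ExampleLocalOnlyRequest is a hypothetical name; the real classes in the diffs below follow this shape):

    import java.io.IOException;

    import org.elasticsearch.action.ActionRequest;
    import org.elasticsearch.action.ActionRequestValidationException;
    import org.elasticsearch.action.support.TransportAction;
    import org.elasticsearch.common.io.stream.StreamOutput;

    public class ExampleLocalOnlyRequest extends ActionRequest {

        @Override
        public ActionRequestValidationException validate() {
            return null; // request-specific validation would go here
        }

        @Override
        public final void writeTo(StreamOutput out) throws IOException {
            // TransportAction.localOnly() throws: this request must never cross the wire.
            TransportAction.localOnly();
        }
    }

The wire constructor taking a StreamInput and the serializing writeTo body are deleted rather than deprecated, so any remaining remote use would fail loudly instead of mis-reading the stream.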
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java
index 466ac58e55bf7..9675d66a183a5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java
@@ -9,7 +9,7 @@
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.common.io.stream.StreamOutput;
 
 import java.io.IOException;
@@ -21,23 +21,17 @@ public GrantRequest() {
         this.grant = new Grant();
     }
 
-    public GrantRequest(StreamInput in) throws IOException {
-        super(in);
-        this.grant = new Grant(in);
-    }
-
     public Grant getGrant() {
         return grant;
     }
 
     @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        grant.writeTo(out);
+    public ActionRequestValidationException validate() {
+        return grant.validate(null);
     }
 
     @Override
-    public ActionRequestValidationException validate() {
-        return grant.validate(null);
+    public final void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java
index 998d35267be37..6e827a4a66a5d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java
@@ -9,10 +9,11 @@
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.support.MetadataUtils;
@@ -39,14 +40,6 @@ public AbstractCreateApiKeyRequest() {
         this.id = UUIDs.base64UUID(); // because auditing can currently only catch requests but not responses,
     }
 
-    @SuppressWarnings("this-escape")
-    public AbstractCreateApiKeyRequest(StreamInput in) throws IOException {
-        super(in);
-        this.id = doReadId(in);
-    }
-
-    protected abstract String doReadId(StreamInput in) throws IOException;
-
     public String getId() {
         return id;
     }
@@ -102,4 +95,9 @@ public ActionRequestValidationException validate() {
         assert refreshPolicy != null : "refresh policy is required";
         return validationException;
     }
+
+    @Override
+    public final void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java
index 34b249d7a8233..0ea772920652b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java
@@ -8,13 +8,10 @@
 package org.elasticsearch.xpack.core.security.action.apikey;
 
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
 import
org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -35,11 +32,6 @@ public BaseBulkUpdateApiKeyRequest( this.ids = Objects.requireNonNull(ids, "API key IDs must not be null"); } - public BaseBulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.ids = in.readStringCollectionAsList(); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); @@ -49,12 +41,6 @@ public ActionRequestValidationException validate() { return validationException; } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringCollection(ids); - } - public List getIds() { return ids; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java index 725a9fb197b07..a3958b31e4716 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -32,17 +29,6 @@ public BaseSingleUpdateApiKeyRequest( this.id = Objects.requireNonNull(id, "API key ID must not be null"); } - public BaseSingleUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.id = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java index e5e3e3f2cabac..a592550484eb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java @@ -7,10 +7,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -43,17 +42,6 @@ public BaseUpdateApiKeyRequest( this.expiration = expiration; } - public BaseUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.roleDescriptors = in.readOptionalCollectionAsList(RoleDescriptor::new); - this.metadata = in.readGenericMap(); - if 
(in.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - expiration = in.readOptionalTimeValue(); - } else { - expiration = null; - } - } - public Map getMetadata() { return metadata; } @@ -90,12 +78,7 @@ public ActionRequestValidationException validate() { } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalCollection(roleDescriptors); - out.writeGenericMap(metadata); - if (out.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - out.writeOptionalTimeValue(expiration); - } + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java index 534c874438e3f..eab74d6250aca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -41,10 +39,6 @@ public BulkUpdateApiKeyRequest( super(ids, roleDescriptors, metadata, expiration); } - public BulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java index 32669d5dca447..1d5eb35b99ea7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java @@ -7,18 +7,12 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -55,32 +49,6 @@ public CreateApiKeyRequest( this.metadata = metadata; } - public CreateApiKeyRequest(StreamInput in) throws IOException { - super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - this.name = in.readOptionalString(); - } else { - this.name = in.readString(); - } - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - 
this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - this.metadata = in.readGenericMap(); - } else { - this.metadata = null; - } - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - return in.readString(); - } else { - return UUIDs.base64UUID(); - } - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; @@ -114,23 +82,4 @@ public ActionRequestValidationException validate() { } return validationException; } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeString(id); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeOptionalString(name); - } else { - out.writeString(name); - } - out.writeOptionalTimeValue(expiration); - out.writeCollection(getRoleDescriptors()); - refreshPolicy.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeGenericMap(metadata); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java index a375808def6d7..eea96bcbfcdaf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java @@ -8,9 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -37,20 +34,6 @@ public CreateCrossClusterApiKeyRequest( this.metadata = metadata; } - public CreateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - this.metadata = in.readGenericMap(); - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - return in.readString(); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; @@ -67,17 +50,6 @@ public ActionRequestValidationException validate() { return super.validate(); } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeString(name); - out.writeOptionalTimeValue(expiration); - out.writeCollection(roleDescriptors); - refreshPolicy.writeTo(out); - out.writeGenericMap(metadata); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java index 16a95e349cda8..17d5424b630eb 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java @@ -9,11 +9,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; import java.util.Objects; /** @@ -30,17 +27,6 @@ public GrantApiKeyRequest() { this.apiKey = new CreateApiKeyRequest(); } - public GrantApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.apiKey = new CreateApiKeyRequest(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - apiKey.writeTo(out); - } - public WriteRequest.RefreshPolicy getRefreshPolicy() { return apiKey.getRefreshPolicy(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java index 9b1e9194d59fd..ffbc5a836633c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -30,10 +28,6 @@ public UpdateApiKeyRequest( super(roleDescriptors, metadata, expiration, id); } - public UpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java index 184ce2c521ce0..04102e571e193 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java @@ -8,11 +8,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -29,10 +27,6 @@ public UpdateCrossClusterApiKeyRequest( super(roleDescriptorBuilder == null ? 
null : List.of(roleDescriptorBuilder.build()), metadata, expiration, id); } - public UpdateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java index f572c57232b2e..72005bf319c49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java @@ -8,27 +8,14 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; - public class ActivateProfileRequest extends GrantRequest { public ActivateProfileRequest() { super(); } - public ActivateProfileRequest(StreamInput in) throws IOException { - super(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } - @Override public ActionRequestValidationException validate() { return super.validate(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 0221554963892..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class BulkUpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - BulkUpdateApiKeyRequest testInstance = createTestInstance(); - BulkUpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected BulkUpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var ids = randomList(randomInt(5), () -> randomAlphaOfLength(10)); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new BulkUpdateApiKeyRequest(ids, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return BulkUpdateApiKeyRequest::new; - } - - @Override - protected BulkUpdateApiKeyRequest mutateInstance(BulkUpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new BulkUpdateApiKeyRequest( - instance.getIds(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 63dd636a31c3f..eee2e6e7da338 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -7,17 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -129,58 
+122,4 @@ public void testRoleDescriptorValidation() { assertThat(ve1.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows[i] + "]")); } } - - public void testSerialization() throws IOException { - final String name = randomAlphaOfLengthBetween(1, 256); - final TimeValue expiration = randomBoolean() - ? null - : TimeValue.parseTimeValue(randomTimeValue(), "test serialization of create api key"); - final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); - boolean nullOrEmptyRoleDescriptors = randomBoolean(); - final List descriptorList; - if (nullOrEmptyRoleDescriptors) { - descriptorList = randomBoolean() ? null : List.of(); - } else { - final int numDescriptors = randomIntBetween(1, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final CreateApiKeyRequest request = new CreateApiKeyRequest(); - request.setName(name); - request.setExpiration(expiration); - - if (refreshPolicy != request.getRefreshPolicy() || randomBoolean()) { - request.setRefreshPolicy(refreshPolicy); - } - request.setRoleDescriptors(descriptorList); - - boolean testV710Bwc = randomBoolean(); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - if (testV710Bwc) { - out.setTransportVersion(TransportVersions.V_7_9_0); // a version before 7.10 - } - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - if (testV710Bwc) { - in.setTransportVersion(TransportVersions.V_7_9_0); - } - final CreateApiKeyRequest serialized = new CreateApiKeyRequest(in); - assertEquals(name, serialized.getName()); - if (false == testV710Bwc) { - assertEquals(request.getId(), serialized.getId()); // API key id is only preserved after v 7.10 - } - assertEquals(expiration, serialized.getExpiration()); - assertEquals(refreshPolicy, serialized.getRefreshPolicy()); - if (nullOrEmptyRoleDescriptors) { - assertThat(serialized.getRoleDescriptors().isEmpty(), is(true)); - } else { - assertEquals(descriptorList, serialized.getRoleDescriptors()); - } - } - } - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java deleted file mode 100644 index a0a9c9b31b430..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; - -public class CreateCrossClusterApiKeyRequestTests extends AbstractWireSerializingTestCase { - - private String access; - private CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - - @Before - public void init() throws IOException { - access = randomCrossClusterApiKeyAccessField(); - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(access); - } - - @Override - protected Writeable.Reader instanceReader() { - return CreateCrossClusterApiKeyRequest::new; - } - - @Override - protected CreateCrossClusterApiKeyRequest createTestInstance() { - CreateCrossClusterApiKeyRequest request = new CreateCrossClusterApiKeyRequest( - randomAlphaOfLengthBetween(3, 8), - roleDescriptorBuilder, - randomExpiration(), - randomMetadata() - ); - request.setRefreshPolicy(randomFrom(IMMEDIATE, WAIT_UNTIL, NONE)); - return request; - } - - @Override - protected CreateCrossClusterApiKeyRequest mutateInstance(CreateCrossClusterApiKeyRequest instance) throws IOException { - switch (randomIntBetween(1, 4)) { - case 1 -> { - return new CreateCrossClusterApiKeyRequest( - randomValueOtherThan(instance.getName(), () -> randomAlphaOfLengthBetween(3, 8)), - roleDescriptorBuilder, - instance.getExpiration(), - instance.getMetadata() - ); - } - case 2 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - CrossClusterApiKeyRoleDescriptorBuilder.parse( - randomValueOtherThan(access, CreateCrossClusterApiKeyRequestTests::randomCrossClusterApiKeyAccessField) - ), - instance.getExpiration(), - instance.getMetadata() - ); - } - case 3 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - randomValueOtherThan(instance.getExpiration(), CreateCrossClusterApiKeyRequestTests::randomExpiration), - instance.getMetadata() - ); - } - default -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - instance.getExpiration(), - randomValueOtherThan(instance.getMetadata(), CreateCrossClusterApiKeyRequestTests::randomMetadata) - ); - } - } - } - - private static TimeValue randomExpiration() { - return randomFrom(TimeValue.timeValueHours(randomIntBetween(1, 999)), null); - } - - private static Map randomMetadata() { - return randomFrom( - randomMap( - 0, - 3, - () -> new Tuple<>( - randomAlphaOfLengthBetween(3, 8), - randomFrom(randomAlphaOfLengthBetween(3, 8), randomInt(), randomBoolean()) - ) - ), - null - ); - } - - private static final List ACCESS_CANDIDATES = List.of(""" - { - "search": [ {"names": ["logs"]} ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc" } ] - }""", """ - { - "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "replication": [ {"names": ["archive"], 
"allow_restricted_indices": true } ] - }""", """ - { - "replication": [ {"names": ["archive"]} ] - }""", """ - { - "search": [ {"names": ["logs"]} ], - "replication": [ {"names": ["archive"]} ] - }"""); - - public static String randomCrossClusterApiKeyAccessField() { - return randomFrom(ACCESS_CANDIDATES); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 83d74b7e9d413..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class UpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - UpdateApiKeyRequest testInstance = createTestInstance(); - UpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected UpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var id = randomAlphaOfLength(10); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new UpdateApiKeyRequest(id, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return UpdateApiKeyRequest::new; - } - - @Override - protected UpdateApiKeyRequest mutateInstance(UpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new UpdateApiKeyRequest( - instance.getId(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } - -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java index f9faa2731dcc0..f7a0d1a6d35bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java @@ -8,49 +8,15 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Map; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class UpdateCrossClusterApiKeyRequestTests extends ESTestCase { - public void testSerialization() throws IOException { - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - final CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - if (randomBoolean()) { - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()); - } else { - roleDescriptorBuilder = null; - } - - final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), roleDescriptorBuilder, metadata, expiration); - assertThat(request.getType(), is(ApiKey.Type.CROSS_CLUSTER)); - assertThat(request.validate(), nullValue()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - final var serialized = new UpdateCrossClusterApiKeyRequest(in); - assertEquals(request.getId(), serialized.getId()); - assertEquals(request.getRoleDescriptors(), serialized.getRoleDescriptors()); - assertEquals(metadata, serialized.getMetadata()); - assertEquals(expiration, serialized.getExpiration()); - assertEquals(request.getType(), serialized.getType()); - } - } - } - public void testNotEmptyUpdateValidation() { final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), null, null, null); final ActionRequestValidationException ve = request.validate(); diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 08bca3ffdaeea..9c22a6bb4d210 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -45,8 +45,8 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE; import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE_DESCRIPTOR; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static 
org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField.RUN_AS_USER_HEADER;
+import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField;
 import static org.hamcrest.Matchers.anEmptyMap;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java
index 881d1340ebc3f..667b513555594 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java
@@ -11,9 +11,7 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
@@ -27,7 +25,7 @@
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
 import org.elasticsearch.xpack.security.authz.AuthorizationService;
 
-public abstract class TransportGrantAction<Request extends GrantRequest, Response extends ActionResponse> extends HandledTransportAction<
+public abstract class TransportGrantAction<Request extends GrantRequest, Response extends ActionResponse> extends TransportAction<
     Request,
     Response> {
 
@@ -39,12 +37,11 @@ public TransportGrantAction(
         String actionName,
         TransportService transportService,
         ActionFilters actionFilters,
-        Writeable.Reader<Request> requestReader,
         AuthenticationService authenticationService,
         AuthorizationService authorizationService,
         ThreadContext threadContext
     ) {
-        super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE);
+        super(actionName, actionFilters, transportService.getTaskManager());
         this.authenticationService = authenticationService;
         this.authorizationService = authorizationService;
         this.threadContext = threadContext;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java
index 9d367bf5caf24..33b1e44004454 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java
@@ -10,9 +10,7 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.SecurityContext;
@@ -24,7 +22,7 @@
 import java.util.Map;
 
 public abstract class TransportBaseUpdateApiKeyAction<Request extends BaseUpdateApiKeyRequest, Response extends ActionResponse> extends
-    HandledTransportAction<Request, Response> {
+    TransportAction<Request, Response> {
 
     private final SecurityContext securityContext;
 
@@ -32,10 +30,9 @@ protected TransportBaseUpdateApiKeyAction(
         final String actionName,
         final TransportService transportService,
         final ActionFilters actionFilters,
-        final Writeable.Reader<Request> requestReader,
         final SecurityContext context
     ) {
-        super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE);
+        super(actionName, actionFilters, transportService.getTaskManager());
         this.securityContext = context;
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java
index cb8f6c861ecf7..3b978c3e44b4c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java
@@ -38,7 +38,7 @@ public TransportBulkUpdateApiKeyAction(
         final CompositeRolesStore rolesStore,
         final NamedXContentRegistry xContentRegistry
     ) {
-        super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, BulkUpdateApiKeyRequest::new, context);
+        super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, context);
         this.apiKeyService = apiKeyService;
         this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry);
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java
index 568e0fe5eb075..268afc7f0b32f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java
@@ -9,9 +9,8 @@
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
@@ -28,7 +27,7 @@
 /**
  * Implementation of the action needed to create an API key
  */
-public final class TransportCreateApiKeyAction extends HandledTransportAction<CreateApiKeyRequest, CreateApiKeyResponse> {
+public final class TransportCreateApiKeyAction extends TransportAction<CreateApiKeyRequest, CreateApiKeyResponse> {
 
     private final ApiKeyService apiKeyService;
     private final ApiKeyUserRoleDescriptorResolver resolver;
@@ -43,7 +42,7 @@ public TransportCreateApiKeyAction(
         CompositeRolesStore rolesStore,
         NamedXContentRegistry xContentRegistry
     ) {
-        super(CreateApiKeyAction.NAME, transportService, actionFilters, CreateApiKeyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
+        super(CreateApiKeyAction.NAME, actionFilters, transportService.getTaskManager());
         this.apiKeyService = apiKeyService;
         this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry);
         this.securityContext = context;
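The action-side half of the migration is equally mechanical: drop the request reader (nothing is ever deserialized from the wire) and hand TransportAction only the task manager. A hedged sketch under assumed names (the action name, class, and trivial doExecute are illustrative; ExampleLocalOnlyRequest is the request sketched earlier):

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.ActionResponse;
    import org.elasticsearch.action.support.ActionFilters;
    import org.elasticsearch.action.support.TransportAction;
    import org.elasticsearch.tasks.Task;
    import org.elasticsearch.transport.TransportService;

    final class ExampleLocalOnlyTransportAction extends TransportAction<ExampleLocalOnlyRequest, ActionResponse.Empty> {

        ExampleLocalOnlyTransportAction(TransportService transportService, ActionFilters actionFilters) {
            // before: super(NAME, transportService, actionFilters, ExampleLocalOnlyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE)
            super("cluster:admin/example/local_only", actionFilters, transportService.getTaskManager());
        }

        @Override
        protected void doExecute(Task task, ExampleLocalOnlyRequest request, ActionListener<ActionResponse.Empty> listener) {
            listener.onResponse(ActionResponse.Empty.INSTANCE); // trivial local handling
        }
    }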
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java index 267a3aafe0c72..eeccd4b833a23 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -26,9 +25,7 @@ /** * Implementation of the action needed to create an API key */ -public final class TransportCreateCrossClusterApiKeyAction extends HandledTransportAction< - CreateCrossClusterApiKeyRequest, - CreateApiKeyResponse> { +public final class TransportCreateCrossClusterApiKeyAction extends TransportAction { private final ApiKeyService apiKeyService; private final SecurityContext securityContext; @@ -40,13 +37,7 @@ public TransportCreateCrossClusterApiKeyAction( ApiKeyService apiKeyService, SecurityContext context ) { - super( - CreateCrossClusterApiKeyAction.NAME, - transportService, - actionFilters, - CreateCrossClusterApiKeyRequest::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(CreateCrossClusterApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.securityContext = context; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java index a6401053634b2..54e073906b815 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java @@ -65,15 +65,7 @@ public TransportGrantApiKeyAction( ApiKeyService apiKeyService, ApiKeyUserRoleDescriptorResolver resolver ) { - super( - GrantApiKeyAction.NAME, - transportService, - actionFilters, - GrantApiKeyRequest::new, - authenticationService, - authorizationService, - threadContext - ); + super(GrantApiKeyAction.NAME, transportService, actionFilters, authenticationService, authorizationService, threadContext); this.apiKeyService = apiKeyService; this.resolver = resolver; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java index 2427b571cf575..b6e0854d6c443 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateApiKeyAction( final CompositeRolesStore rolesStore, final NamedXContentRegistry xContentRegistry ) { - super(UpdateApiKeyAction.NAME, 
transportService, actionFilters, UpdateApiKeyRequest::new, context); + super(UpdateApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java index a47bbb0301ebc..f4578bf7a737c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateCrossClusterApiKeyAction( final ApiKeyService apiKeyService, final SecurityContext context ) { - super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, UpdateCrossClusterApiKeyRequest::new, context); + super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java index d7241011d9c09..4d76205d29021 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java @@ -39,7 +39,6 @@ public TransportActivateProfileAction( ActivateProfileAction.NAME, transportService, actionFilters, - ActivateProfileRequest::new, authenticationService, authorizationService, threadPool.getThreadContext() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java index f94acab50b6b5..9c1419f67bcf0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Set; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java index 70190b70f3f1a..1525b9157a610 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java 
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java @@ -33,7 +33,7 @@ import java.util.Set; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index d2ca214723416..daa2b9cf149de 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -158,7 +158,6 @@ import static org.elasticsearch.test.SecurityIntegTestCase.getFastStoredHashAlgoForTests; import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_ID_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_METADATA_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_TYPE_KEY; @@ -200,6 +199,29 @@ public class ApiKeyServiceTests extends ESTestCase { + private static final List ACCESS_CANDIDATES = List.of(""" + { + "search": [ {"names": ["logs"]} ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc" } ] + }""", """ + { + "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "replication": [ {"names": ["archive"], "allow_restricted_indices": true } ] + }""", """ + { + "replication": [ {"names": ["archive"]} ] + }""", """ + { + "search": [ {"names": ["logs"]} ], + "replication": [ {"names": ["archive"]} ] + }"""); private ThreadPool threadPool; private Client client; private SecurityIndexManager securityIndex; @@ -2845,6 +2867,10 @@ private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { ); } + public static String randomCrossClusterApiKeyAccessField() { + return randomFrom(ACCESS_CANDIDATES); + } + public static class Utils { private static final AuthenticationContextSerializer authenticationContextSerializer = new AuthenticationContextSerializer(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java index f9fa9269c4ef1..ddeffc0675498 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java
@@ -30,7 +30,7 @@
 import java.util.List;
 import java.util.Map;
-import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField;
+import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;

From 62729c9480a9336b6937d0e9581fde6430a5e745 Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Wed, 10 Apr 2024 11:52:02 +0200
Subject: [PATCH 214/264] Port DocsTest gradle plugin to java (#107124)

* Refactor DocsTest plugin to java
* Rework asciidoc parsing to make adding more parsers simple
---
 .../doc/DocsTestPluginFuncTest.groovy         | 132 +++
 .../gradle/internal/doc/DocsTestPlugin.groovy |  99 ---
 .../doc/RestTestsFromSnippetsTask.groovy      | 503 -----------
 .../gradle/internal/doc/SnippetsTask.groovy   | 438 ---------
 .../internal/doc/AsciidocSnippetParser.java   | 306 +++++++
 .../gradle/internal/doc/DocSnippetTask.java   |  88 ++
 .../gradle/internal/doc/DocsTestPlugin.java   | 106 +++
 .../gradle/internal/doc/ParsingUtils.java     |  57 ++
 .../doc/RestTestsFromDocSnippetTask.java      | 526 +++++++++++
 .../gradle/internal/doc/Snippet.java          | 188 ++++
 .../gradle/internal/doc/SnippetParser.java    |  17 +
 .../gradle/internal/doc/Source.java           |  21 +
 .../internal/doc/AsciidocParserSpec.groovy    | 184 ++++
 .../internal/doc/DocSnippetTaskSpec.groovy    | 676 ++++++++++++++
 .../RestTestsFromDocSnippetTaskSpec.groovy    | 839 ++++++++++++++++++
 .../doc/RestTestFromSnippetsTaskTests.java    |  57 --
 .../internal/doc/SnippetsTaskTests.java       |  63 --
 17 files changed, 3140 insertions(+), 1160 deletions(-)
 create mode 100644 build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy
 delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy
 delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy
 delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java
 create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java
 create mode 100644 build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy
 create mode 100644 build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy
 create mode 100644
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy delete mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java delete mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy new file mode 100644 index 0000000000000..4c542d371c32c --- /dev/null +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import org.elasticsearch.gradle.fixtures.AbstractGradleInternalPluginFuncTest +import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin +import org.gradle.testkit.runner.TaskOutcome + +class DocsTestPluginFuncTest extends AbstractGradleInternalPluginFuncTest { + Class pluginClassUnderTest = DocsTestPlugin.class + + def setup() { + File docDir = new File(projectDir, 'doc'); + docDir.mkdirs() + addSampleDoc(docDir) + buildApiRestrictionsDisabled = true + configurationCacheCompatible = false; + buildFile << """ +tasks.named('listSnippets') { + docs = fileTree('doc') +} + +tasks.named('listConsoleCandidates') { + docs = fileTree('doc') +} +""" + } + + def "can list snippets"() { + when: + def result = gradleRunner("listSnippets").build() + then: + result.task(":listSnippets").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listSnippets +mapper-annotated-text.asciidoc[37:39](Painless) +mapper-annotated-text.asciidoc[42:44](js) +mapper-annotated-text.asciidoc[51:69](console)// TEST[setup:seats] +""") + } + + def "can console candidates"() { + when: + def result = gradleRunner("listConsoleCandidates").build() + then: + result.task(":listConsoleCandidates").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listConsoleCandidates +mapper-annotated-text.asciidoc[42:44](js) +""") + } + + void addSampleDoc(File docFolder) { + new File(docFolder, "mapper-annotated-text.asciidoc").text = """ +[[painless-filter-context]] +=== Filter context + +Use a Painless script as a {ref}/query-dsl-script-query.html[filter] in a +query to include and exclude documents. + + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +*Return* + +`boolean`:: + Return `true` if the current document should be returned as a result of + the query, and `false` otherwise. + + +*API* + +The standard <> is available. + +*Example* + +To run this example, first follow the steps in +<>. + +This script finds all unsold documents that cost less than \$25. 
+ +[source,Painless] +---- +doc['sold'].value == false && doc['cost'].value < 25 +---- + +[source,js] +---- +curl 'hello world' +---- + +Defining `cost` as a script parameter enables the cost to be configured +in the script query request. For example, the following request finds +all available theatre seats for evening performances that are under \$25. + +[source,console] +---- +GET seats/_search +{ + "query": { + "bool": { + "filter": { + "script": { + "script": { + "source": "doc['sold'].value == false && doc['cost'].value < params.cost", + "params": { + "cost": 25 + } + } + } + } + } + } +} +---- +// TEST[setup:seats] +""" + } +} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy deleted file mode 100644 index 38b4cb499eeb9..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc - -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask -import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask -import org.gradle.api.Action -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.Directory -import org.gradle.api.file.ProjectLayout -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.provider.Provider -import org.gradle.api.tasks.TaskProvider - -import javax.inject.Inject - -/** - * Sets up tests for documentation. - */ -class DocsTestPlugin implements Plugin { - - private FileOperations fileOperations - private ProjectLayout projectLayout - - @Inject - DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { - this.projectLayout = projectLayout - this.fileOperations = fileOperations - } - - @Override - void apply(Project project) { - project.pluginManager.apply('elasticsearch.legacy-yaml-rest-test') - - String distribution = System.getProperty('tests.distribution', 'default') - // The distribution can be configured with -Dtests.distribution on the command line - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { testDistribution = distribution.toUpperCase() } - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { nameCustomization = { it.replace("yamlRestTest", "node") } } - // Docs are published separately so no need to assemble - project.tasks.named("assemble").configure {enabled = false } - Map commonDefaultSubstitutions = [ - /* These match up with the asciidoc syntax for substitutions but - * the values may differ. In particular {version} needs to resolve - * to the version being built for testing but needs to resolve to - * the last released version for docs. 
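- * For example, a snippet containing {version} is rewritten to the concrete version under test (for instance 8.14.0) before any REST test is generated from it.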
*/ - '\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(), - '\\{version_qualified\\}': VersionProperties.elasticsearch, - '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), - '\\{build_flavor\\}' : distribution, - '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(), - ] - project.tasks.register('listSnippets', SnippetsTask) { - group 'Docs' - description 'List each snippet' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - println(snippet.toString()) - } - } - } - project.tasks.register('listConsoleCandidates', SnippetsTask) { - group 'Docs' - description - 'List snippets that probably should be marked // CONSOLE' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) { - println(it.toString()) - } - } - } - } - - Provider restRootDir = projectLayout.buildDirectory.dir("rest") - TaskProvider buildRestTests = project.tasks.register('buildRestTests', RestTestsFromSnippetsTask) { - defaultSubstitutions = commonDefaultSubstitutions - testRoot.convention(restRootDir) - doFirst { - getFileOperations().delete(testRoot.get()) - } - } - - // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm - project.sourceSets.yamlRestTest.output.dir(restRootDir, builtBy: buildRestTests) - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy deleted file mode 100644 index 81207181dc9a7..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy +++ /dev/null @@ -1,503 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import groovy.transform.PackageScope -import org.elasticsearch.gradle.internal.doc.SnippetsTask.Snippet -import org.gradle.api.Action -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.DirectoryProperty -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.OutputDirectory -import org.gradle.api.model.ObjectFactory - -import javax.inject.Inject; -import java.nio.file.Files -import java.nio.file.Path - -/** - * Generates REST tests for each snippet marked // TEST. - */ -abstract class RestTestsFromSnippetsTask extends SnippetsTask { - /** - * These languages aren't supported by the syntax highlighter so we - * shouldn't use them. - */ - private static final List BAD_LANGUAGES = ['json', 'javascript'] - - /** - * Test setups defined in the build instead of the docs so they can be - * shared between many doc files. - */ - @Input - Map setups = new HashMap() - - /** - * Test teardowns defined in the build instead of the docs so they can be - * shared between many doc files. 
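- * For example, a teardown registered here under a name of the build's choosing, say "my_teardown", can be referenced from a doc snippet with // TEST[teardown:my_teardown].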
- */ - @Input - Map teardowns = new HashMap() - - /** - * A list of files that contain snippets that *probably* should be - * converted to `// CONSOLE` but have yet to be converted. If a file is in - * this list and doesn't contain unconverted snippets this task will fail. - * If there are unconverted snippets not in this list then this task will - * fail. All files are paths relative to the docs dir. - */ - @Input - List expectedUnconvertedCandidates = [] - - /** - * Root directory of the tests being generated. To make rest tests happy - * we generate them in a testRoot which is contained in this directory. - */ - private DirectoryProperty testRoot - - @Internal - Set names = new HashSet<>() - - @Inject - abstract FileOperations getFileOperations(); - - @Inject - RestTestsFromSnippetsTask(ObjectFactory objectFactory) { - testRoot = objectFactory.directoryProperty() - TestBuilder builder = new TestBuilder() - perSnippet = new Action() { - @Override - void execute(Snippet snippet) { - builder.handleSnippet(snippet) - } - } - doLast { - builder.checkUnconverted() - builder.finishLastTest() - } - } - - /** - * Root directory containing all the files generated by this task. It is - * contained within testRoot. - */ - File outputRoot() { - return new File(testRoot.get().asFile, '/rest-api-spec/test') - } - - @OutputDirectory - DirectoryProperty getTestRoot() { - return testRoot - } -/** - * Is this snippet a candidate for conversion to `// CONSOLE`? - */ - static isConsoleCandidate(Snippet snippet) { - /* Snippets that are responses or already marked as `// CONSOLE` or - * `// NOTCONSOLE` are not candidates. */ - if (snippet.console != null || snippet.testResponse) { - return false - } - /* js snippets almost always should be marked with `// CONSOLE`. js - * snippets that shouldn't be marked `// CONSOLE`, like examples for - * js client, should always be marked with `// NOTCONSOLE`. - * - * `sh` snippets that contain `curl` almost always should be marked - * with `// CONSOLE`. In the exceptionally rare cases where they are - * not communicating with Elasticsearch, like the examples in the ec2 - * and gce discovery plugins, the snippets should be marked - * `// NOTCONSOLE`. */ - return snippet.language == 'js' || snippet.curl - } - - /** - * Certain requests should not have the shard failure check because the - * format of the response is incompatible i.e. it is not a JSON object. - */ - static shouldAddShardFailureCheck(String path) { - return path.startsWith('_cat') == false && path.startsWith('_ml/datafeeds/') == false - } - - /** - * Converts Kibana's block quoted strings into standard JSON. These - * {@code """} delimited strings can be embedded in CONSOLE and can - * contain newlines and {@code "} without the normal JSON escaping. - * This has to add it. - */ - @PackageScope - static String replaceBlockQuote(String body) { - int start = body.indexOf('"""'); - if (start < 0) { - return body - } - /* - * 1.3 is a fairly wild guess of the extra space needed to hold - * the escaped string. 
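- * As a concrete illustration of the rewrite: a body containing """{ "match": { "f": "v" } }""" becomes "{ \"match\": { \"f\": \"v\" } }", with embedded quotes and newlines escaped.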
- */ - StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); - int startOfNormal = 0; - while (start >= 0) { - int end = body.indexOf('"""', start + 3); - if (end < 0) { - throw new InvalidUserDataException( - "Invalid block quote starting at $start in:\n$body") - } - result.append(body.substring(startOfNormal, start)); - result.append('"'); - result.append(body.substring(start + 3, end) - .replace('"', '\\"') - .replace("\n", "\\n")); - result.append('"'); - startOfNormal = end + 3; - start = body.indexOf('"""', startOfNormal); - } - result.append(body.substring(startOfNormal)); - return result.toString(); - } - - private class TestBuilder { - private static final String SYNTAX = { - String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ - String pathAndQuery = /(?[^\n]+)/ - String badBody = /GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#/ - String body = /(?(?:\n(?!$badBody)[^\n]+)+)/ - String rawRequest = /(?:$method\s+$pathAndQuery$body?)/ - String yamlRequest = /(?:startyaml(?s)(?.+?)(?-s)endyaml)/ - String nonComment = /(?:$rawRequest|$yamlRequest)/ - String comment = /(?#.+)/ - /(?:$comment|$nonComment)\n+/ - }() - - /** - * The file in which we saw the last snippet that made a test. - */ - Path lastDocsPath - - /** - * The file we're building. - */ - PrintWriter current - - /** - * Files containing all snippets that *probably* should be converted - * to `// CONSOLE` but have yet to be converted. All files are paths - * relative to the docs dir. - */ - Set unconvertedCandidates = new HashSet<>() - - /** - * The last non-TESTRESPONSE snippet. - */ - Snippet previousTest - - /** - * Called each time a snippet is encountered. Tracks the snippets and - * calls buildTest to actually build the test. - */ - - void handleSnippet(Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(snippet)) { - unconvertedCandidates.add(snippet.path.toString() - .replace('\\', '/')) - } - if (BAD_LANGUAGES.contains(snippet.language)) { - throw new InvalidUserDataException( - "$snippet: Use `js` instead of `${snippet.language}`.") - } - if (snippet.testSetup) { - testSetup(snippet) - previousTest = snippet - return - } - if (snippet.testTearDown) { - testTearDown(snippet) - previousTest = snippet - return - } - if (snippet.testResponse || snippet.language == 'console-result') { - if (previousTest == null) { - throw new InvalidUserDataException("$snippet: No paired previous test") - } - if (previousTest.path != snippet.path) { - throw new InvalidUserDataException("$snippet: Result can't be first in file") - } - response(snippet) - return - } - if ((snippet.language == 'js') && (snippet.console)) { - throw new InvalidUserDataException( - "$snippet: Use `[source,console]` instead of `// CONSOLE`.") - } - if (snippet.test || snippet.language == 'console') { - test(snippet) - previousTest = snippet - return - } - // Must be an unmarked snippet.... - } - - private void test(Snippet test) { - setupCurrent(test) - - if (test.continued) { - /* Catch some difficult to debug errors with // TEST[continued] - * and throw a helpful error message. 
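- * For example, // TEST[continued] on the first snippet of a file has no previous request to continue, so we fail fast and name the offending snippet.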
*/ - if (previousTest == null || previousTest.path != test.path) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot be on first snippet in a file: $test") - } - if (previousTest != null && previousTest.testSetup) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TESTSETUP: $test") - } - if (previousTest != null && previousTest.testTearDown) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TEARDOWN: $test") - } - } else { - current.println('---') - if (test.name != null && test.name.isBlank() == false) { - if(names.add(test.name) == false) { - throw new InvalidUserDataException("Duplicated snippet name '$test.name': $test") - } - current.println("\"$test.name\":") - } else { - current.println("\"line_$test.start\":") - } - /* The Elasticsearch test runner doesn't support quite a few - * constructs unless we output this skip. We don't know if - * we're going to use these constructs, but we might so we - * output the skip just in case. */ - current.println(" - skip:") - current.println(" features: ") - current.println(" - default_shards") - current.println(" - stash_in_key") - current.println(" - stash_in_path") - current.println(" - stash_path_replace") - current.println(" - warnings") - } - if (test.skip) { - if (test.continued) { - throw new InvalidUserDataException("Continued snippets " - + "can't be skipped") - } - current.println(" - always_skip") - current.println(" reason: $test.skip") - } - if (test.setup != null) { - setup(test) - } - - body(test, false) - - if (test.teardown != null) { - teardown(test) - } - } - - private void setup(final Snippet snippet) { - // insert a setup defined outside of the docs - for (final String name : snippet.setup.split(',')) { - final String setup = setups[name] - if (setup == null) { - throw new InvalidUserDataException( - "Couldn't find named setup $name for $snippet" - ) - } - current.println("# Named setup ${name}") - current.println(setup) - } - } - - private void teardown(final Snippet snippet) { - // insert a teardown defined outside of the docs - for (final String name : snippet.teardown.split(',')) { - final String teardown = teardowns[name] - if (teardown == null) { - throw new InvalidUserDataException( - "Couldn't find named teardown $name for $snippet" - ) - } - current.println("# Named teardown ${name}") - current.println(teardown) - } - } - - private void response(Snippet response) { - if (null == response.skip) { - current.println(" - match: ") - current.println(" \$body: ") - replaceBlockQuote(response.contents).eachLine { - current.println(" $it") - } - } - } - - void emitDo(String method, String pathAndQuery, String body, - String catchPart, List warnings, boolean inSetup, boolean skipShardFailures) { - def (String path, String query) = pathAndQuery.tokenize('?') - if (path == null) { - path = '' // Catch requests to the root... - } else { - path = path.replace('<', '%3C').replace('>', '%3E') - } - current.println(" - do:") - if (catchPart != null) { - current.println(" catch: $catchPart") - } - if (false == warnings.isEmpty()) { - current.println(" warnings:") - for (String warning in warnings) { - // Escape " because we're going to quote the warning - String escaped = warning.replaceAll('"', '\\\\"') - /* Quote the warning in case it starts with [ which makes - * it look too much like an array. 
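- * For example, a warning such as "[deprecation] field foo" is emitted as - "[deprecation] field foo" so the YAML parser does not mistake it for an array.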
*/ - current.println(" - \"$escaped\"") - } - } - current.println(" raw:") - current.println(" method: $method") - current.println(" path: \"$path\"") - if (query != null) { - for (String param: query.tokenize('&')) { - def (String name, String value) = param.tokenize('=') - if (value == null) { - value = '' - } - current.println(" $name: \"$value\"") - } - } - if (body != null) { - // Throw out the leading newline we get from parsing the body - body = body.substring(1) - // Replace """ quoted strings with valid json ones - body = replaceBlockQuote(body) - current.println(" body: |") - body.eachLine { current.println(" $it") } - } - /* Catch any shard failures. These only cause a non-200 response if - * no shard succeeds. But we need to fail the tests on all of these - * because they mean invalid syntax or broken queries or something - * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to have assertions in the setup - * section so we have to skip it there. We also omit the assertion - * from APIs that don't return a JSON object - */ - if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { - current.println(" - is_false: _shards.failures") - } - } - - private void testSetup(Snippet snippet) { - if (lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet: wasn't first. TESTSETUP can only be used in the first snippet of a document.") - } - setupCurrent(snippet) - current.println('---') - current.println("setup:") - if (snippet.setup != null) { - setup(snippet) - } - body(snippet, true) - } - - private void testTearDown(Snippet snippet) { - if (previousTest != null && previousTest.testSetup == false && lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet must follow test setup or be first") - } - setupCurrent(snippet) - current.println('---') - current.println('teardown:') - body(snippet, true) - } - - private void body(Snippet snippet, boolean inSetup) { - parse("$snippet", snippet.contents, SYNTAX) { matcher, last -> - if (matcher.group("comment") != null) { - // Comment - return - } - String yamlRequest = matcher.group("yaml"); - if (yamlRequest != null) { - current.println(yamlRequest) - return - } - String method = matcher.group("method") - String pathAndQuery = matcher.group("pathAndQuery") - String body = matcher.group("body") - String catchPart = last ? 
snippet.catchPart : null - if (pathAndQuery.startsWith('/')) { - // Leading '/'s break the generated paths - pathAndQuery = pathAndQuery.substring(1) - } - emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, - inSetup, snippet.skipShardsFailures) - } - } - - private PrintWriter setupCurrent(Snippet test) { - if (lastDocsPath == test.path) { - return - } - names.clear() - finishLastTest() - lastDocsPath = test.path - - // Make the destination file: - // Shift the path into the destination directory tree - Path dest = outputRoot().toPath().resolve(test.path) - // Replace the extension - String fileName = dest.getName(dest.nameCount - 1) - dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yml')) - - // Now setup the writer - Files.createDirectories(dest.parent) - current = dest.newPrintWriter('UTF-8') - } - - void finishLastTest() { - if (current != null) { - current.close() - current = null - } - } - - void checkUnconverted() { - List listedButNotFound = [] - for (String listed : expectedUnconvertedCandidates) { - if (false == unconvertedCandidates.remove(listed)) { - listedButNotFound.add(listed) - } - } - String message = "" - if (false == listedButNotFound.isEmpty()) { - Collections.sort(listedButNotFound) - listedButNotFound = listedButNotFound.collect {' ' + it} - message += "Expected unconverted snippets but none found in:\n" - message += listedButNotFound.join("\n") - } - if (false == unconvertedCandidates.isEmpty()) { - List foundButNotListed = - new ArrayList<>(unconvertedCandidates) - Collections.sort(foundButNotListed) - foundButNotListed = foundButNotListed.collect {' ' + it} - if (false == "".equals(message)) { - message += "\n" - } - message += "Unexpected unconverted snippets:\n" - message += foundButNotListed.join("\n") - } - if (false == "".equals(message)) { - throw new InvalidUserDataException(message); - } - } - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy deleted file mode 100644 index 3e4ad91024082..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy +++ /dev/null @@ -1,438 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonToken - -import org.gradle.api.Action; -import org.gradle.api.DefaultTask -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.ConfigurableFileTree -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.TaskAction - -import java.nio.file.Path -import java.util.regex.Matcher - -/** - * A task which will run a closure on each snippet in the documentation. 
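- * For example, the listSnippets task registers an action here that simply prints each parsed snippet.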
- */ -class SnippetsTask extends DefaultTask { - private static final String SCHAR = /(?:\\\/|[^\/])/ - private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\// - private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/ - private static final String SKIP_REGEX = /skip:([^\]]+)/ - private static final String SETUP = /setup:([^ \]]+)/ - private static final String TEARDOWN = /teardown:([^ \]]+)/ - private static final String WARNING = /warning:(.+)/ - private static final String NON_JSON = /(non_json)/ - private static final String TEST_SYNTAX = - /(?:$CATCH|$SUBSTITUTION|$SKIP_REGEX|(continued)|$SETUP|$TEARDOWN|$WARNING|(skip_shard_failures)) ?/ - - /** - * Action to take on each snippet. Called with a single parameter, an - * instance of Snippet. - */ - @Internal - Action perSnippet - - /** - * The docs to scan. Defaults to every file in the directory exception the - * build.gradle file because that is appropriate for Elasticsearch's docs - * directory. - */ - @InputFiles - ConfigurableFileTree docs - - /** - * Substitutions done on every snippet's contents. - */ - @Input - Map defaultSubstitutions = [:] - - @TaskAction - void executeTask() { - /* - * Walks each line of each file, building snippets as it encounters - * the lines that make up the snippet. - */ - for (File file: docs) { - String lastLanguage - String name - int lastLanguageLine - Snippet snippet = null - StringBuilder contents = null - List substitutions = null - Closure emit = { - snippet.contents = contents.toString() - contents = null - Closure doSubstitution = { String pattern, String subst -> - /* - * $body is really common but it looks like a - * backreference so we just escape it here to make the - * tests cleaner. - */ - subst = subst.replace('$body', '\\$body') - subst = subst.replace('$_path', '\\$_path') - // \n is a new line.... - subst = subst.replace('\\n', '\n') - snippet.contents = snippet.contents.replaceAll( - pattern, subst) - } - defaultSubstitutions.each doSubstitution - if (substitutions != null) { - substitutions.each doSubstitution - substitutions = null - } - if (snippet.language == null) { - throw new InvalidUserDataException("$snippet: " - + "Snippet missing a language. This is required by " - + "Elasticsearch's doc testing infrastructure so we " - + "be sure we don't accidentally forget to test a " - + "snippet.") - } - // Try to detect snippets that contain `curl` - if (snippet.language == 'sh' || snippet.language == 'shell') { - snippet.curl = snippet.contents.contains('curl') - if (snippet.console == false && snippet.curl == false) { - throw new InvalidUserDataException("$snippet: " - + "No need for NOTCONSOLE if snippet doesn't " - + "contain `curl`.") - } - } - if (snippet.testResponse - && ('js' == snippet.language || 'console-result' == snippet.language) - && null == snippet.skip) { - String quoted = snippet.contents - // quote values starting with $ - .replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"') - // quote fields starting with $ - .replaceAll(/(\$[^ ,\n}]+)\s*:/, '"$1":') - - JsonFactory jf = new JsonFactory(); - jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,true); - JsonParser jsonParser; - - try { - jsonParser = jf.createParser(quoted); - while(jsonParser.isClosed() == false) { - jsonParser.nextToken(); - } - } catch (JsonParseException e) { - throw new InvalidUserDataException("Invalid json in " - + snippet.toString() + ". 
The error is:\n" + e.getMessage() + ".\n" - + "After substitutions and munging, the json looks like:\n" + quoted, e); - } - } - perSnippet.execute(snippet) - snippet = null - } - file.eachLine('UTF-8') { String line, int lineNumber -> - Matcher matcher - if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet - if (snippet == null) { - Path path = docs.dir.toPath().relativize(file.toPath()) - snippet = new Snippet(path: path, start: lineNumber, name: name) - if (lastLanguageLine == lineNumber - 1) { - snippet.language = lastLanguage - } - name = null - } else { - snippet.end = lineNumber - } - return - } - def source = matchSource(line) - if (source.matches) { - lastLanguage = source.language - lastLanguageLine = lineNumber - name = source.name - return - } - if (line ==~ /\/\/\s*AUTOSENSE\s*/) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "AUTOSENSE has been replaced by CONSOLE.") - } - if (line ==~ /\/\/\s*CONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "CONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = true - return - } - if (line ==~ /\/\/\s*NOTCONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "NOTCONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = false - return - } - matcher = line =~ /\/\/\s*TEST(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TEST not paired with a snippet at ") - } - snippet.test = true - if (matcher.group(2) != null) { - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), TEST_SYNTAX) { - if (it.group(1) != null) { - snippet.catchPart = it.group(1) - return - } - if (it.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - substitutions.add([it.group(2), it.group(3)]) - return - } - if (it.group(4) != null) { - snippet.skip = it.group(4) - return - } - if (it.group(5) != null) { - snippet.continued = true - return - } - if (it.group(6) != null) { - snippet.setup = it.group(6) - return - } - if (it.group(7) != null) { - snippet.teardown = it.group(7) - return - } - if (it.group(8) != null) { - snippet.warnings.add(it.group(8)) - return - } - if (it.group(9) != null) { - snippet.skipShardsFailures = true - return - } - throw new InvalidUserDataException( - "Invalid test marker: $line") - } - } - return - } - matcher = line =~ /\/\/\s*TESTRESPONSE(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TESTRESPONSE not paired with a snippet") - } - snippet.testResponse = true - if (matcher.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$NON_JSON|$SKIP_REGEX) ?/) { - if (it.group(1) != null) { - // TESTRESPONSE[s/adsf/jkl/] - substitutions.add([it.group(1), it.group(2)]) - } else if (it.group(3) != null) { - // TESTRESPONSE[non_json] - substitutions.add(['^', '/']) - substitutions.add(['\n$', '\\\\s*/']) - substitutions.add(['( +)', '$1\\\\s+']) - substitutions.add(['\n', '\\\\s*\n ']) - } else if (it.group(4) != null) { - // 
TESTRESPONSE[skip:reason] - snippet.skip = it.group(4) - } - } - } - return - } - if (line ==~ /\/\/\s*TESTSETUP\s*/) { - snippet.testSetup = true - return - } - if (line ==~ /\/\/\s*TEARDOWN\s*/) { - snippet.testTearDown = true - return - } - if (snippet == null) { - // Outside - return - } - if (snippet.end == Snippet.NOT_FINISHED) { - // Inside - if (contents == null) { - contents = new StringBuilder() - } - // We don't need the annotations - line = line.replaceAll(/<\d+>/, '') - // Nor any trailing spaces - line = line.replaceAll(/\s+$/, '') - contents.append(line).append('\n') - return - } - // Allow line continuations for console snippets within lists - if (snippet != null && line.trim() == '+') { - return - } - // Just finished - emit() - } - if (snippet != null) emit() - } - } - - static Source matchSource(String line) { - def matcher = line =~ /\["?source"?(?:\.[^,]+)?,\s*"?([-\w]+)"?(,((?!id=).)*(id="?([-\w]+)"?)?(.*))?].*/ - if(matcher.matches()){ - return new Source(matches: true, language: matcher.group(1), name: matcher.group(5)) - } - return new Source(matches: false) - } - - static class Source { - boolean matches - String language - String name - } - - static class Snippet { - static final int NOT_FINISHED = -1 - - /** - * Path to the file containing this snippet. Relative to docs.dir of the - * SnippetsTask that created it. - */ - Path path - int start - int end = NOT_FINISHED - String contents - - Boolean console = null - boolean test = false - boolean testResponse = false - boolean testSetup = false - boolean testTearDown = false - String skip = null - boolean continued = false - String language = null - String catchPart = null - String setup = null - String teardown = null - boolean curl - List warnings = new ArrayList() - boolean skipShardsFailures = false - String name - - @Override - public String toString() { - String result = "$path[$start:$end]" - if (language != null) { - result += "($language)" - } - if (console != null) { - result += console ? '// CONSOLE' : '// NOTCONSOLE' - } - if (test) { - result += '// TEST' - if (catchPart) { - result += "[catch: $catchPart]" - } - if (skip) { - result += "[skip=$skip]" - } - if (continued) { - result += '[continued]' - } - if (setup) { - result += "[setup:$setup]" - } - if (teardown) { - result += "[teardown:$teardown]" - } - for (String warning in warnings) { - result += "[warning:$warning]" - } - if (skipShardsFailures) { - result += '[skip_shard_failures]' - } - } - if (testResponse) { - result += '// TESTRESPONSE' - if (skip) { - result += "[skip=$skip]" - } - } - if (testSetup) { - result += '// TESTSETUP' - } - if (curl) { - result += '(curl)' - } - return result - } - } - - /** - * Repeatedly match the pattern to the string, calling the closure with the - * matchers each time there is a match. If there are characters that don't - * match then blow up. If the closure takes two parameters then the second - * one is "is this the last match?". - */ - protected parse(String location, String s, String pattern, Closure c) { - if (s == null) { - return // Silly null, only real stuff gets to match! 
- } - Matcher m = s =~ pattern - int offset = 0 - Closure extraContent = { message -> - StringBuilder cutOut = new StringBuilder() - cutOut.append(s[offset - 6..offset - 1]) - cutOut.append('*') - cutOut.append(s[offset..Math.min(offset + 5, s.length() - 1)]) - String cutOutNoNl = cutOut.toString().replace('\n', '\\n') - throw new InvalidUserDataException("$location: Extra content " - + "$message ('$cutOutNoNl') matching [$pattern]: $s") - } - while (m.find()) { - if (m.start() != offset) { - extraContent("between [$offset] and [${m.start()}]") - } - offset = m.end() - if (c.maximumNumberOfParameters == 1) { - c(m) - } else { - c(m, offset == s.length()) - } - } - if (offset == 0) { - throw new InvalidUserDataException("$location: Didn't match " - + "$pattern: $s") - } - if (offset != s.length()) { - extraContent("after [$offset]") - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java new file mode 100644 index 0000000000000..7b35fd29fbd1a --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java @@ -0,0 +1,306 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import org.gradle.api.InvalidUserDataException; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class AsciidocSnippetParser implements SnippetParser { + public static final Pattern SNIPPET_PATTERN = Pattern.compile("-{4,}\\s*"); + + private static final String CATCH = "catch:\\s*((?:\\/[^\\/]+\\/)|[^ \\]]+)"; + private static final String SKIP_REGEX = "skip:([^\\]]+)"; + private static final String SETUP = "setup:([^ \\]]+)"; + private static final String TEARDOWN = "teardown:([^ \\]]+)"; + private static final String WARNING = "warning:(.+)"; + private static final String NON_JSON = "(non_json)"; + private static final String SCHAR = "(?:\\\\\\/|[^\\/])"; + private static final String SUBSTITUTION = "s\\/(" + SCHAR + "+)\\/(" + SCHAR + "*)\\/"; + private static final String TEST_SYNTAX = "(?:" + + CATCH + + "|" + + SUBSTITUTION + + "|" + + SKIP_REGEX + + "|(continued)|" + + SETUP + + "|" + + TEARDOWN + + "|" + + WARNING + + "|(skip_shard_failures)) ?"; + + private final Map defaultSubstitutions; + + public AsciidocSnippetParser(Map defaultSubstitutions) { + this.defaultSubstitutions = defaultSubstitutions; + } + + @Override + public List parseDoc(File rootDir, File docFile, List> substitutions) { + String lastLanguage = null; + Snippet snippet = null; + String name = null; + int lastLanguageLine = 0; + StringBuilder contents = null; + List snippets = new ArrayList<>(); + + try (Stream lines = Files.lines(docFile.toPath(), StandardCharsets.UTF_8)) { + List 
linesList = lines.collect(Collectors.toList()); + for (int lineNumber = 0; lineNumber < linesList.size(); lineNumber++) { + String line = linesList.get(lineNumber); + if (SNIPPET_PATTERN.matcher(line).matches()) { + if (snippet == null) { + Path path = rootDir.toPath().relativize(docFile.toPath()); + snippet = new Snippet(path, lineNumber + 1, name); + snippets.add(snippet); + if (lastLanguageLine == lineNumber - 1) { + snippet.language = lastLanguage; + } + name = null; + } else { + snippet.end = lineNumber + 1; + } + continue; + } + + Source source = matchSource(line); + if (source.matches) { + lastLanguage = source.language; + lastLanguageLine = lineNumber; + name = source.name; + continue; + } + if (consoleHandled(docFile.getName(), lineNumber, line, snippet)) { + continue; + } + if (testHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) { + continue; + } + if (testResponseHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) { + continue; + } + if (line.matches("\\/\\/\s*TESTSETUP\s*")) { + snippet.testSetup = true; + continue; + } + if (line.matches("\\/\\/\s*TEARDOWN\s*")) { + snippet.testTearDown = true; + continue; + } + if (snippet == null) { + // Outside + continue; + } + if (snippet.end == Snippet.NOT_FINISHED) { + // Inside + if (contents == null) { + contents = new StringBuilder(); + } + // We don't need the annotations + line = line.replaceAll("<\\d+>", ""); + // Nor any trailing spaces + line = line.replaceAll("\s+$", ""); + contents.append(line).append("\n"); + continue; + } + // Allow line continuations for console snippets within lists + if (snippet != null && line.trim().equals("+")) { + continue; + } + finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions); + substitutions = new ArrayList<>(); + ; + snippet = null; + contents = null; + } + if (snippet != null) { + finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions); + contents = null; + snippet = null; + substitutions = new ArrayList<>(); + } + } catch (IOException e) { + e.printStackTrace(); + } + return snippets; + } + + static Snippet finalizeSnippet( + final Snippet snippet, + String contents, + Map defaultSubstitutions, + Collection> substitutions + ) { + snippet.contents = contents.toString(); + snippet.validate(); + escapeSubstitutions(snippet, defaultSubstitutions, substitutions); + return snippet; + } + + private static void escapeSubstitutions( + Snippet snippet, + Map defaultSubstitutions, + Collection> substitutions + ) { + BiConsumer doSubstitution = (pattern, subst) -> { + /* + * $body is really common but it looks like a + * backreference so we just escape it here to make the + * tests cleaner. 
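+ * For example, a replacement value of "$body" would otherwise be treated as a group reference by String.replaceAll, so it is escaped to the literal "\$body" before the replacement runs.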
+ */ + subst = subst.replace("$body", "\\$body"); + subst = subst.replace("$_path", "\\$_path"); + subst = subst.replace("\\n", "\n"); + snippet.contents = snippet.contents.replaceAll(pattern, subst); + }; + defaultSubstitutions.forEach(doSubstitution); + + if (substitutions != null) { + substitutions.forEach(e -> doSubstitution.accept(e.getKey(), e.getValue())); + } + } + + private boolean testResponseHandled( + String name, + int lineNumber, + String line, + Snippet snippet, + final List> substitutions + ) { + Matcher matcher = Pattern.compile("\\/\\/\s*TESTRESPONSE(\\[(.+)\\])?\s*").matcher(line); + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException(name + ":" + lineNumber + ": TESTRESPONSE not paired with a snippet at "); + } + snippet.testResponse = true; + if (matcher.group(2) != null) { + String loc = name + ":" + lineNumber; + ParsingUtils.parse( + loc, + matcher.group(2), + "(?:" + SUBSTITUTION + "|" + NON_JSON + "|" + SKIP_REGEX + ") ?", + (Matcher m, Boolean last) -> { + if (m.group(1) != null) { + // TESTRESPONSE[s/adsf/jkl/] + substitutions.add(Map.entry(m.group(1), m.group(2))); + } else if (m.group(3) != null) { + // TESTRESPONSE[non_json] + substitutions.add(Map.entry("^", "/")); + substitutions.add(Map.entry("\n$", "\\\\s*/")); + substitutions.add(Map.entry("( +)", "$1\\\\s+")); + substitutions.add(Map.entry("\n", "\\\\s*\n ")); + } else if (m.group(4) != null) { + // TESTRESPONSE[skip:reason] + snippet.skip = m.group(4); + } + } + ); + } + return true; + } + return false; + } + + private boolean testHandled(String name, int lineNumber, String line, Snippet snippet, List> substitutions) { + Matcher matcher = Pattern.compile("\\/\\/\s*TEST(\\[(.+)\\])?\s*").matcher(line); + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException(name + ":" + lineNumber + ": TEST not paired with a snippet at "); + } + snippet.test = true; + if (matcher.group(2) != null) { + String loc = name + ":" + lineNumber; + ParsingUtils.parse(loc, matcher.group(2), TEST_SYNTAX, (Matcher m, Boolean last) -> { + if (m.group(1) != null) { + snippet.catchPart = m.group(1); + return; + } + if (m.group(2) != null) { + substitutions.add(Map.entry(m.group(2), m.group(3))); + return; + } + if (m.group(4) != null) { + snippet.skip = m.group(4); + return; + } + if (m.group(5) != null) { + snippet.continued = true; + return; + } + if (m.group(6) != null) { + snippet.setup = m.group(6); + return; + } + if (m.group(7) != null) { + snippet.teardown = m.group(7); + return; + } + if (m.group(8) != null) { + snippet.warnings.add(m.group(8)); + return; + } + if (m.group(9) != null) { + snippet.skipShardsFailures = true; + return; + } + throw new InvalidUserDataException("Invalid test marker: " + line); + }); + } + return true; + } + return false; + } + + private boolean consoleHandled(String fileName, int lineNumber, String line, Snippet snippet) { + if (line.matches("\\/\\/\s*CONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": CONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE"); + } + snippet.console = true; + return true; + } else if (line.matches("\\/\\/\s*NOTCONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": NOTCONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new 
+ private boolean consoleHandled(String fileName, int lineNumber, String line, Snippet snippet) { + if (line.matches("\\/\\/\s*CONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": CONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE"); + } + snippet.console = true; + return true; + } else if (line.matches("\\/\\/\s*NOTCONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": NOTCONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE"); + } + snippet.console = false; + return true; + } + return false; + } + + static Source matchSource(String line) { + Pattern pattern = Pattern.compile("\\[\"?source\"?(?:\\.[^,]+)?,\\s*\"?([-\\w]+)\"?(,((?!id=).)*(id=\"?([-\\w]+)\"?)?(.*))?].*"); + Matcher matcher = pattern.matcher(line); + if (matcher.matches()) { + return new Source(true, matcher.group(1), matcher.group(5)); + } + return new Source(false, null, null); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java new file mode 100644 index 0000000000000..87f0621d53fba --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import org.gradle.api.Action; +import org.gradle.api.DefaultTask; +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.ConfigurableFileTree; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.TaskAction; + +import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public abstract class DocSnippetTask extends DefaultTask { + + /** + * Action to take on each snippet. Called with a single parameter, an + * instance of Snippet. + */ + private Action<Snippet> perSnippet; + + /** + * The docs to scan. Defaults to every file in the directory except the + * build.gradle file because that is appropriate for Elasticsearch's docs + * directory. + */ + private ConfigurableFileTree docs; + private Map<String, String> defaultSubstitutions = new HashMap<>(); + + @InputFiles + public ConfigurableFileTree getDocs() { + return docs; + } + + public void setDocs(ConfigurableFileTree docs) { + this.docs = docs; + }
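+ /* + * Illustration (assumed example): a default substitution of "\\{version\\}" mapped to the + * current Elasticsearch version lets docs write {version} while the generated tests see a + * concrete version string; DocsTestPlugin elsewhere in this change registers exactly that + * kind of mapping. + */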
+ /** + * Substitutions done on every snippet's contents. + */ + @Input + public Map<String, String> getDefaultSubstitutions() { + return defaultSubstitutions; + } + + @TaskAction + void executeTask() { + for (File file : docs) { + List<Snippet> snippets = parseDocFile(docs.getDir(), file, new ArrayList<>()); + if (perSnippet != null) { + snippets.forEach(perSnippet::execute); + } + } + } + + List<Snippet> parseDocFile(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions) { + SnippetParser parser = parserForFileType(docFile); + return parser.parseDoc(rootDir, docFile, substitutions); + } + + private SnippetParser parserForFileType(File docFile) { + if (docFile.getName().endsWith(".asciidoc")) { + return new AsciidocSnippetParser(defaultSubstitutions); + } + throw new InvalidUserDataException("Unsupported file type: " + docFile.getName()); + } + + public void setDefaultSubstitutions(Map<String, String> defaultSubstitutions) { + this.defaultSubstitutions = defaultSubstitutions; + } + + public void setPerSnippet(Action<Snippet> perSnippet) { + this.perSnippet = perSnippet; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java new file mode 100644 index 0000000000000..bbb5102dd6699 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import org.elasticsearch.gradle.OS; +import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; +import org.elasticsearch.gradle.testclusters.TestClustersPlugin; +import org.elasticsearch.gradle.testclusters.TestDistribution; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.file.Directory; +import org.gradle.api.file.ProjectLayout; +import org.gradle.api.internal.file.FileOperations; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.TaskProvider; + +import java.util.Map; + +import javax.inject.Inject; + +public class DocsTestPlugin implements Plugin<Project> { + private FileOperations fileOperations; + private ProjectLayout projectLayout; + + @Inject + DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { + this.projectLayout = projectLayout; + this.fileOperations = fileOperations; + } + + @Override + public void apply(Project project) { + project.getPluginManager().apply("elasticsearch.legacy-yaml-rest-test"); + + // The distribution can be configured with -Dtests.distribution on the command line + String distribution = System.getProperty("tests.distribution", "default"); + @SuppressWarnings("unchecked") + NamedDomainObjectContainer<ElasticsearchCluster> testClusters = (NamedDomainObjectContainer<ElasticsearchCluster>) project + .getExtensions() + .getByName(TestClustersPlugin.EXTENSION_NAME); + + testClusters.matching((c) -> c.getName().equals("yamlRestTest")).configureEach(c -> { + c.setTestDistribution(TestDistribution.valueOf(distribution.toUpperCase())); + c.setNameCustomization((name) -> name.replace("yamlRestTest", "node")); + }); +
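+ // Example (assumed invocation): ./gradlew yamlRestTest -Dtests.distribution=default, or + // another TestDistribution constant in lower case, selects the distribution under test.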
project.getTasks().named("assemble").configure(task -> { task.setEnabled(false); }); + + Map commonDefaultSubstitutions = Map.of( + /* These match up with the asciidoc syntax for substitutions but + * the values may differ. In particular {version} needs to resolve + * to the version being built for testing but needs to resolve to + * the last released version for docs. */ + "\\{version\\}", + Version.fromString(VersionProperties.getElasticsearch()).toString(), + "\\{version_qualified\\}", + VersionProperties.getElasticsearch(), + "\\{lucene_version\\}", + VersionProperties.getLucene().replaceAll("-snapshot-\\w+$", ""), + "\\{build_flavor\\}", + distribution, + "\\{build_type\\}", + OS.conditionalString().onWindows(() -> "zip").onUnix(() -> "tar").supply() + ); + + project.getTasks().register("listSnippets", DocSnippetTask.class, task -> { + task.setGroup("Docs"); + task.setDescription("List each snippet"); + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.setPerSnippet(snippet -> System.out.println(snippet)); + }); + + project.getTasks().register("listConsoleCandidates", DocSnippetTask.class, task -> { + task.setGroup("Docs"); + task.setDescription("List snippets that probably should be marked // CONSOLE"); + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.setPerSnippet(snippet -> { + if (snippet.isConsoleCandidate()) { + System.out.println(snippet); + } + }); + }); + + Provider restRootDir = projectLayout.getBuildDirectory().dir("rest"); + TaskProvider buildRestTests = project.getTasks() + .register("buildRestTests", RestTestsFromDocSnippetTask.class, task -> { + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.getTestRoot().convention(restRootDir); + task.doFirst(task1 -> fileOperations.delete(restRootDir.get())); + }); + + // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm + JavaPluginExtension byType = project.getExtensions().getByType(JavaPluginExtension.class); + byType.getSourceSets().getByName("yamlRestTest").getOutput().dir(Map.of("builtBy", buildRestTests), restRootDir); + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java new file mode 100644 index 0000000000000..b17dd4c7e21d3 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import org.gradle.api.InvalidUserDataException; + +import java.util.function.BiConsumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ParsingUtils { + + static void extraContent(String message, String content, int offset, String location, String pattern) { + StringBuilder cutOut = new StringBuilder(); + // Guard against running past the start of the content when showing context + cutOut.append(content.substring(Math.max(0, offset - 6), offset)); + cutOut.append('*'); + cutOut.append(content.substring(offset, Math.min(offset + 5, content.length()))); + String cutOutNoNl = cutOut.toString().replace("\n", "\\n"); + throw new InvalidUserDataException( + location + ": Extra content " + message + " ('" + cutOutNoNl + "') matching [" + pattern + "]: " + content + ); + } + + /** + * Repeatedly match the pattern to the string, calling the handler with the + * matcher each time there is a match. If there are characters that don't + * match then blow up. The handler's second parameter is "is this the last + * match?". + */ + static void parse(String location, String content, String pattern, BiConsumer<Matcher, Boolean> testHandler) { + if (content == null) { + return; // Silly null, only real stuff gets to match! + } + Matcher m = Pattern.compile(pattern).matcher(content); + int offset = 0; + while (m.find()) { + if (m.start() != offset) { + extraContent("between [" + offset + "] and [" + m.start() + "]", content, offset, location, pattern); + } + offset = m.end(); + testHandler.accept(m, offset == content.length()); + } + if (offset == 0) { + throw new InvalidUserDataException(location + ": Didn't match " + pattern + ": " + content); + } + if (offset != content.length()) { + extraContent("after [" + offset + "]", content, offset, location, pattern); + } + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java new file mode 100644 index 0000000000000..c5b1d67627dd9 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java @@ -0,0 +1,526 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import groovy.transform.PackageScope; + +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.internal.file.FileOperations; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.OutputDirectory; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +public abstract class RestTestsFromDocSnippetTask extends DocSnippetTask { + + private Map<String, String> setups = new HashMap<>(); + + private Map<String, String> teardowns = new HashMap<>(); + + /** + * Test setups defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map<String, String> getSetups() { + return setups; + } + + public void setSetups(Map<String, String> setups) { + this.setups = setups; + } + + /** + * Test teardowns defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map<String, String> getTeardowns() { + return teardowns; + } + + public void setTeardowns(Map<String, String> teardowns) { + this.teardowns = teardowns; + } + + /** + * A list of files that contain snippets that *probably* should be + * converted to `// CONSOLE` but have yet to be converted. If a file is in + * this list and doesn't contain unconverted snippets this task will fail. + * If there are unconverted snippets not in this list then this task will + * fail. All files are paths relative to the docs dir. + */ + private List<String> expectedUnconvertedCandidates; + + @Input + public List<String> getExpectedUnconvertedCandidates() { + return expectedUnconvertedCandidates; + } + + public void setExpectedUnconvertedCandidates(List<String> expectedUnconvertedCandidates) { + this.expectedUnconvertedCandidates = expectedUnconvertedCandidates; + } + + /** + * Root directory of the tests being generated. To make rest tests happy + * we generate them in a testRoot which is contained in this directory. + */ + private DirectoryProperty testRoot; + + private Set<String> names = new HashSet<>(); + + @Internal + public Set<String> getNames() { + return names; + } + + public void setNames(Set<String> names) { + this.names = names; + } + + @Inject + public abstract FileOperations getFileOperations(); + + /** + * Root directory containing all the files generated by this task. It is + * contained within testRoot. + */ + @OutputDirectory + File getOutputRoot() { + return new File(testRoot.get().getAsFile(), "/rest-api-spec/test"); + } + + @OutputDirectory + DirectoryProperty getTestRoot() { + return testRoot; + } + + @Inject + public RestTestsFromDocSnippetTask(ObjectFactory objectFactory) { + testRoot = objectFactory.directoryProperty(); + TestBuilder builder = new TestBuilder(); + + setPerSnippet(snippet -> builder.handleSnippet(snippet)); + doLast(task -> { + builder.finishLastTest(); + builder.checkUnconverted(); + }); + } + + /** + * Certain requests should not have the shard failure check because the + * format of the response is incompatible i.e. it is not a JSON object.
+ */ + static boolean shouldAddShardFailureCheck(String path) { + return path.startsWith("_cat") == false && path.startsWith("_ml/datafeeds/") == false; + } + + /** + * Converts Kibana's block quoted strings into standard JSON. These + * {@code """} delimited strings can be embedded in CONSOLE and can + * contain newlines and {@code "} without the normal JSON escaping. + * This has to add it. + */ + @PackageScope + static String replaceBlockQuote(String body) { + int start = body.indexOf("\"\"\""); + if (start < 0) { + return body; + } + /* + * 1.3 is a fairly wild guess of the extra space needed to hold + * the escaped string. + */ + StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); + int startOfNormal = 0; + while (start >= 0) { + int end = body.indexOf("\"\"\"", start + 3); + if (end < 0) { + throw new InvalidUserDataException("Invalid block quote starting at " + start + " in:\n" + body); + } + result.append(body.substring(startOfNormal, start)); + result.append('"'); + result.append(body.substring(start + 3, end).replace("\"", "\\\"").replace("\n", "\\n")); + result.append('"'); + startOfNormal = end + 3; + start = body.indexOf("\"\"\"", startOfNormal); + } + result.append(body.substring(startOfNormal)); + return result.toString(); + } + + private class TestBuilder { + /** + * These languages aren't supported by the syntax highlighter so we + * shouldn't use them. + */ + private static final List<String> BAD_LANGUAGES = List.of("json", "javascript"); + + String method = "(?<method>GET|PUT|POST|HEAD|OPTIONS|DELETE)"; + String pathAndQuery = "(?<pathAndQuery>[^\\n]+)"; + + String badBody = "GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#"; + String body = "(?<body>(?:\\n(?!" + badBody + ")[^\\n]+)+)"; + + String rawRequest = "(?:" + method + "\\s+" + pathAndQuery + body + "?)"; + + String yamlRequest = "(?:startyaml(?s)(?<yaml>.+?)(?-s)endyaml)"; + String nonComment = "(?:" + rawRequest + "|" + yamlRequest + ")"; + String comment = "(?<comment>#.+)"; + + String SYNTAX = "(?:" + comment + "|" + nonComment + ")\\n+"; + + /** + * Files containing all snippets that *probably* should be converted + * to `// CONSOLE` but have yet to be converted. All files are paths + * relative to the docs dir. + */ + private Set<String> unconvertedCandidates = new HashSet<>(); + + /** + * The last non-TESTRESPONSE snippet. + */ + Snippet previousTest; + + /** + * The file in which we saw the last snippet that made a test. + */ + Path lastDocsPath; + + /** + * The file we're building. + */ + PrintWriter current; + + Set<String> names = new HashSet<>();
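+ /* + * Illustration (assumed input): given a snippet body of + * GET /_search + * { "query": { "match_all": {} } } + * the SYNTAX pattern above captures method=GET, pathAndQuery=/_search and the JSON + * lines as body, which body() below hands to emitDo. + */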
+ /** + * Called each time a snippet is encountered. Tracks the snippets and + * calls buildTest to actually build the test. + */ + public void handleSnippet(Snippet snippet) { + if (snippet.isConsoleCandidate()) { + unconvertedCandidates.add(snippet.path.toString().replace('\\', '/')); + } + if (BAD_LANGUAGES.contains(snippet.language)) { + throw new InvalidUserDataException(snippet + ": Use `js` instead of `" + snippet.language + "`."); + } + if (snippet.testSetup) { + testSetup(snippet); + previousTest = snippet; + return; + } + if (snippet.testTearDown) { + testTearDown(snippet); + previousTest = snippet; + return; + } + if (snippet.testResponse || snippet.language.equals("console-result")) { + if (previousTest == null) { + throw new InvalidUserDataException(snippet + ": No paired previous test"); + } + if (previousTest.path.equals(snippet.path) == false) { + throw new InvalidUserDataException(snippet + ": Result can't be first in file"); + } + response(snippet); + return; + } + if (("js".equals(snippet.language)) && snippet.console != null && snippet.console) { + throw new InvalidUserDataException(snippet + ": Use `[source,console]` instead of `// CONSOLE`."); + } + if (snippet.test || snippet.language.equals("console")) { + test(snippet); + previousTest = snippet; + return; + } + // Must be an unmarked snippet.... + } + + private void test(Snippet test) { + setupCurrent(test); + + if (test.continued) { + /* Catch some difficult to debug errors with // TEST[continued] + * and throw a helpful error message. */ + if (previousTest == null || previousTest.path.equals(test.path) == false) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot be on first snippet in a file: " + test); + } + // previousTest is known to be non-null past this point + if (previousTest.testSetup) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TESTSETUP: " + test); + } + if (previousTest.testTearDown) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TEARDOWN: " + test); + } + } else { + current.println("---"); + if (test.name != null && test.name.isBlank() == false) { + if (names.add(test.name) == false) { + throw new InvalidUserDataException("Duplicated snippet name '" + test.name + "': " + test); + } + current.println("\"" + test.name + "\":"); + } else { + current.println("\"line_" + test.start + "\":"); + } + /* The Elasticsearch test runner doesn't support quite a few + * constructs unless we output this skip. We don't know if + * we're going to use these constructs, but we might so we + * output the skip just in case. */
+ current.println(" - skip:"); + current.println(" features:"); + current.println(" - default_shards"); + current.println(" - stash_in_key"); + current.println(" - stash_in_path"); + current.println(" - stash_path_replace"); + current.println(" - warnings"); + } + if (test.skip != null) { + if (test.continued) { + throw new InvalidUserDataException("Continued snippets " + "can't be skipped"); + } + current.println(" - always_skip"); + current.println(" reason: " + test.skip); + } + if (test.setup != null) { + setup(test); + } + + body(test, false); + + if (test.teardown != null) { + teardown(test); + } + } + + private void response(Snippet response) { + if (null == response.skip) { + current.println(" - match:"); + current.println(" $body:"); + replaceBlockQuote(response.contents).lines().forEach(line -> current.println(" " + line)); + } + } + + private void teardown(final Snippet snippet) { + // insert a teardown defined outside of the docs + for (final String name : snippet.teardown.split(",")) { + final String teardown = teardowns.get(name); + if (teardown == null) { + throw new InvalidUserDataException("Couldn't find named teardown " + name + " for " + snippet); + } + current.println("# Named teardown " + name); + current.println(teardown); + } + } + + private void testTearDown(Snippet snippet) { + // Path values must be compared with equals(); == only checks reference identity + if (previousTest != null && previousTest.testSetup == false && snippet.path.equals(lastDocsPath)) { + throw new InvalidUserDataException(snippet + " must follow test setup or be first"); + } + setupCurrent(snippet); + current.println("---"); + current.println("teardown:"); + body(snippet, true); + }
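+ /* + * Illustration (assumed output): for the snippet line 'GET /_search' outside of setup, + * emitDo below prints roughly: + * - do: + * raw: + * method: GET + * path: "_search" + * - is_false: _shards.failures + */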
+ void emitDo( + String method, + String pathAndQuery, + String body, + String catchPart, + List<String> warnings, + boolean inSetup, + boolean skipShardFailures + ) { + String[] tokenized = pathAndQuery.split("\\?"); + String path = tokenized[0]; + String query = tokenized.length > 1 ? tokenized[1] : null; + if (path == null) { + path = ""; // Catch requests to the root... + } else { + path = path.replace("<", "%3C").replace(">", "%3E"); + } + current.println(" - do:"); + if (catchPart != null) { + current.println(" catch: " + catchPart); + } + if (false == warnings.isEmpty()) { + current.println(" warnings:"); + for (String warning : warnings) { + // Escape " because we're going to quote the warning + String escaped = warning.replaceAll("\"", "\\\\\""); + /* Quote the warning in case it starts with [ which makes + * it look too much like an array. */ + current.println(" - \"" + escaped + "\""); + } + } + current.println(" raw:"); + current.println(" method: " + method); + current.println(" path: \"" + path + "\""); + if (query != null) { + for (String param : query.split("&")) { + String[] tokenizedQuery = param.split("="); + String paramName = tokenizedQuery[0]; + String paramValue = tokenizedQuery.length > 1 ? tokenizedQuery[1] : null; + if (paramValue == null) { + paramValue = ""; + } + current.println(" " + paramName + ": \"" + paramValue + "\""); + } + } + if (body != null) { + // Throw out the leading newline we get from parsing the body + body = body.substring(1); + // Replace """ quoted strings with valid json ones + body = replaceBlockQuote(body); + current.println(" body: |"); + body.lines().forEach(line -> current.println(" " + line)); + } + /* Catch any shard failures. These only cause a non-200 response if + * no shard succeeds. But we need to fail the tests on all of these + * because they mean invalid syntax or broken queries or something + * else that we don't want to teach people to do. The REST test + * framework doesn't allow us to have assertions in the setup + * section so we have to skip it there. We also omit the assertion + * from APIs that don't return a JSON object + */ + if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { + current.println(" - is_false: _shards.failures"); + } + } + + private void body(Snippet snippet, boolean inSetup) { + ParsingUtils.parse(snippet.getLocation(), snippet.contents, SYNTAX, (matcher, last) -> { + if (matcher.group("comment") != null) { + // Comment + return; + } + String yamlRequest = matcher.group("yaml"); + if (yamlRequest != null) { + current.println(yamlRequest); + return; + } + String method = matcher.group("method"); + String pathAndQuery = matcher.group("pathAndQuery"); + String body = matcher.group("body"); + String catchPart = last ? snippet.catchPart : null; + if (pathAndQuery.startsWith("/")) { + // Leading '/'s break the generated paths + pathAndQuery = pathAndQuery.substring(1); + } + emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, inSetup, snippet.skipShardsFailures); + }); + + } + + private PrintWriter setupCurrent(Snippet test) { + if (test.path.equals(lastDocsPath)) { + return current; + } + names.clear(); + finishLastTest(); + lastDocsPath = test.path; + + // Make the destination file: + // Shift the path into the destination directory tree + Path dest = getOutputRoot().toPath().resolve(test.path); + // Replace the extension + String fileName = dest.getName(dest.getNameCount() - 1).toString(); + dest = dest.getParent().resolve(fileName.replace(".asciidoc", ".yml")); + + // Now setup the writer + try { + Files.createDirectories(dest.getParent()); + current = new PrintWriter(dest.toFile(), "UTF-8"); + return current; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void testSetup(Snippet snippet) { + // Path values must be compared with equals(); == only checks reference identity + if (snippet.path.equals(lastDocsPath)) { + throw new InvalidUserDataException( + snippet + ": wasn't first. TESTSETUP can only be used in the first snippet of a document."
+ ); + } + setupCurrent(snippet); + current.println("---"); + current.println("setup:"); + if (snippet.setup != null) { + setup(snippet); + } + body(snippet, true); + } + + private void setup(final Snippet snippet) { + // insert a setup defined outside of the docs + for (final String name : snippet.setup.split(",")) { + final String setup = setups.get(name); + if (setup == null) { + throw new InvalidUserDataException("Couldn't find named setup " + name + " for " + snippet); + } + current.println("# Named setup " + name); + current.println(setup); + } + } + + public void checkUnconverted() { + List listedButNotFound = new ArrayList<>(); + for (String listed : expectedUnconvertedCandidates) { + if (false == unconvertedCandidates.remove(listed)) { + listedButNotFound.add(listed); + } + } + String message = ""; + if (false == listedButNotFound.isEmpty()) { + Collections.sort(listedButNotFound); + listedButNotFound = listedButNotFound.stream().map(notfound -> " " + notfound).collect(Collectors.toList()); + message += "Expected unconverted snippets but none found in:\n"; + message += listedButNotFound.stream().collect(Collectors.joining("\n")); + } + if (false == unconvertedCandidates.isEmpty()) { + List foundButNotListed = new ArrayList<>(unconvertedCandidates); + Collections.sort(foundButNotListed); + foundButNotListed = foundButNotListed.stream().map(f -> " " + f).collect(Collectors.toList()); + if (false == "".equals(message)) { + message += "\n"; + } + message += "Unexpected unconverted snippets:\n"; + message += foundButNotListed.stream().collect(Collectors.joining("\n")); + } + if (false == "".equals(message)) { + throw new InvalidUserDataException(message); + } + } + + public void finishLastTest() { + if (current != null) { + current.close(); + current = null; + } + } + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java new file mode 100644 index 0000000000000..b8aa864734f44 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; + +import org.gradle.api.InvalidUserDataException; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +public class Snippet { + static final int NOT_FINISHED = -1; + + /** + * Path to the file containing this snippet. Relative to docs.dir of the + * SnippetsTask that created it. 
+ */ + Path path; + int start; + int end = NOT_FINISHED; + public String contents; + + Boolean console = null; + boolean test = false; + boolean testResponse = false; + boolean testSetup = false; + boolean testTearDown = false; + String skip = null; + boolean continued = false; + String language = null; + String catchPart = null; + String setup = null; + String teardown = null; + boolean curl; + List<String> warnings = new ArrayList<>(); + boolean skipShardsFailures = false; + String name; + + public Snippet(Path path, int start, String name) { + this.path = path; + this.start = start; + this.name = name; + } + + public void validate() { + if (language == null) { + throw new InvalidUserDataException( + name + + ": " + + "Snippet missing a language. This is required by " + + "Elasticsearch's doc testing infrastructure so we " + + "can be sure we don't accidentally forget to test a " + + "snippet." + ); + } + assertValidCurlInput(); + assertValidJsonInput(); + } + + String getLocation() { + return path + "[" + start + ":" + end + "]"; + } + + private void assertValidCurlInput() { + // Try to detect snippets that contain `curl` + if ("sh".equals(language) || "shell".equals(language)) { + curl = contents.contains("curl"); + if (console == Boolean.FALSE && curl == false) { + throw new InvalidUserDataException(name + ": " + "No need for NOTCONSOLE if snippet doesn't " + "contain `curl`."); + } + } + } + + private void assertValidJsonInput() { + // Strings must be compared with equals(); == only checks reference identity + if (testResponse && ("js".equals(language) || "console-result".equals(language)) && null == skip) { + String quoted = contents + // quote values starting with $ + .replaceAll("([:,])\\s*(\\$[^ ,\\n}]+)", "$1 \"$2\"") + // quote fields starting with $ + .replaceAll("(\\$[^ ,\\n}]+)\\s*:", "\"$1\":"); + + JsonFactory jf = new JsonFactory(); + jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true); + JsonParser jsonParser; + + try { + jsonParser = jf.createParser(quoted); + while (jsonParser.isClosed() == false) { + jsonParser.nextToken(); + } + } catch (JsonParseException e) { + throw new InvalidUserDataException( + "Invalid json in " + + name + + ". The error is:\n" + + e.getMessage() + + ".\n" + + "After substitutions and munging, the json looks like:\n" + + quoted, + e + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public String toString() { + String result = path + "[" + start + ":" + end + "]"; + if (language != null) { + result += "(" + language + ")"; + } + if (console != null) { + result += console ? "// CONSOLE" : "// NOTCONSOLE"; + } + if (test) { + result += "// TEST"; + if (catchPart != null) { + result += "[catch: " + catchPart + "]"; + } + if (skip != null) { + result += "[skip=" + skip + "]"; + } + if (continued) { + result += "[continued]"; + } + if (setup != null) { + result += "[setup:" + setup + "]"; + } + if (teardown != null) { + result += "[teardown:" + teardown + "]"; + } + for (String warning : warnings) { + result += "[warning:" + warning + "]"; + } + if (skipShardsFailures) { + result += "[skip_shard_failures]"; + } + } + if (testResponse) { + result += "// TESTRESPONSE"; + if (skip != null) { + result += "[skip=" + skip + "]"; + } + } + if (testSetup) { + result += "// TESTSETUP"; + } + if (curl) { + result += "(curl)"; + } + return result; + }
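+ /* + * Example (assumed values) of the toString output above for a tested console snippet: + * docs/painless.asciidoc[12:34](console)// TEST[skip=no setup] + * i.e. the location, the language and the markers parsed for the snippet. + */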
+ /** + * Is this snippet a candidate for conversion to `// CONSOLE`? + */ + boolean isConsoleCandidate() { + /* Snippets that are responses or already marked as `// CONSOLE` or + * `// NOTCONSOLE` are not candidates. */ + if (console != null || testResponse) { + return false; + } + /* js snippets almost always should be marked with `// CONSOLE`. js + * snippets that shouldn't be marked `// CONSOLE`, like examples for + * js client, should always be marked with `// NOTCONSOLE`. + * + * `sh` snippets that contain `curl` almost always should be marked + * with `// CONSOLE`. In the exceptionally rare cases where they are + * not communicating with Elasticsearch, like the examples in the ec2 + * and gce discovery plugins, the snippets should be marked + * `// NOTCONSOLE`. */ + return language.equals("js") || curl; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java new file mode 100644 index 0000000000000..064c1c460febf --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import java.io.File; +import java.util.List; +import java.util.Map; + +public interface SnippetParser { + List<Snippet> parseDoc(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java new file mode 100644 index 0000000000000..b7f2f01aa7987 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +public final class Source { + boolean matches; + String language; + String name; + + public Source(boolean matches, String language, String name) { + this.matches = matches; + this.language = language; + this.name = name; + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy new file mode 100644 index 0000000000000..b7ac363ef7ad3 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import spock.lang.Specification +import spock.lang.Unroll + +import org.gradle.api.InvalidUserDataException + +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.finalizeSnippet; +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.matchSource; + +class AsciidocParserSpec extends Specification { + + def testMatchSource() { + expect: + with(matchSource("[source,console]")) { + matches == true + language == "console" + name == null + } + + with(matchSource("[source,console,id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source, console, id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console,id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + with(matchSource("[source.merge.styled,esql]")) { + matches == true + language == "esql" + } + + with(matchSource("[source.merge.styled,foo-bar]")) { + matches == true + language == "foo-bar" + } + } + + @Unroll + def "checks for valid json for #languageParam"() { + given: + def snippet = snippet() { + language = languageParam + testResponse = true + } + def json = """{ + "name": "John Doe", + "age": 30, + "isMarried": true, + "address": { + "street": "123 Main Street", + "city": "Springfield", + "state": "IL", + "zip": "62701" + }, + "hobbies": ["Reading", "Cooking", "Traveling"] +}""" + when: + def result = finalizeSnippet(snippet, json, [:], [:].entrySet()) + then: + result != null + + when: + finalizeSnippet(snippet, "some invalid json", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Invalid json in") + + when: + snippet.skip = "true" + result = finalizeSnippet(snippet, "some invalid json", [:], [:].entrySet()) + then: + result != null + + where: + languageParam << ["js", "console-result"] + } + + def "test finalized snippet handles substitutions"() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content substDefault subst", [substDefault: "\$body"], [subst: 'substValue'].entrySet()) + then: + snippet.contents == "snippet-content \$body substValue" + } + + def snippetMustHaveLanguage() { + given: + def snippet = snippet() + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Snippet missing a language.") + } + + def testEmit() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + snippet.contents == "snippet-content" + } + + def testSnippetsWithCurl() { + given: + def snippet = snippet() { + language = "sh" + name = "snippet-name-1" + } + when: + finalizeSnippet(snippet, "curl substDefault subst", [:],
[:].entrySet()) + then: + snippet.curl == true + } + + def "test snippets with no curl no console"() { + given: + def snippet = snippet() { + console = false + language = "shell" + } + when: + finalizeSnippet(snippet, "hello substDefault subst", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("No need for NOTCONSOLE if snippet doesn't contain `curl`") + } + + Snippet snippet(Closure configClosure = {}) { + def snippet = new Snippet(new File("SomePath").toPath(), 0, "snippet-name-1") + configClosure.delegate = snippet + configClosure() + return snippet + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..89939645d0f9c --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -0,0 +1,676 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +class DocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir + + def "handling test parsing multiple snippets per file"() { + given: + def project = ProjectBuilder.builder().build() + def task = project.tasks.register("docSnippetTask", DocSnippetTask).get() + when: + def substitutions = [] + def snippets = task.parseDocFile( + tempDir, docFile( + """ +[[mapper-annotated-text]] +=== Mapper annotated text plugin + +experimental[] + +The mapper-annotated-text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or Named Entity Recognition +tools). + + +The elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token +stream at the same position as the underlying text it annotates. + +:plugin_name: mapper-annotated-text +include::install_remove.asciidoc[] + +[[mapper-annotated-text-usage]] +==== Using the `annotated-text` field + +The `annotated-text` tokenizes text content as per the more common {ref}/text.html[`text`] field (see +"limitations" below) but also injects any marked-up annotation tokens directly into +the search index: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_field": { + "type": "annotated_text" + } + } + } +} +-------------------------- + +Such a mapping would allow marked-up text eg wikipedia articles to be indexed as both text +and structured tokens. The annotations use a markdown-like syntax using URL encoding of +one or more values separated by the `&` symbol. + + +We can use the "_analyze" api to test how an example annotation would be stored as tokens +in the search index: + + +[source,js] +-------------------------- +GET my-index-000001/_analyze +{ + "field": "my_field", + "text":"Investors in [Apple](Apple+Inc.) rejoiced." 
+} +-------------------------- +// NOTCONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "investors", + "start_offset": 0, + "end_offset": 9, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "in", + "start_offset": 10, + "end_offset": 12, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "Apple Inc.", <1> + "start_offset": 13, + "end_offset": 18, + "type": "annotation", + "position": 2 + }, + { + "token": "apple", + "start_offset": 13, + "end_offset": 18, + "type": "<ALPHANUM>", + "position": 2 + }, + { + "token": "rejoiced", + "start_offset": 19, + "end_offset": 27, + "type": "<ALPHANUM>", + "position": 3 + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +<1> Note the whole annotation token `Apple Inc.` is placed, unchanged as a single token in +the token stream and at the same position (position 2) as the text token (`apple`) it annotates. + + +We can now perform searches for annotations using regular `term` queries that don't tokenize +the provided search values. Annotations are a more precise way of matching as can be seen +in this example where a search for `Beck` will not match `Jeff Beck` : + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "[Beck](Beck) announced a new tour"<1> +} + +PUT my-index-000001/_doc/2 +{ + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2> +} + +# Example search +GET my-index-000001/_search +{ + "query": { + "term": { + "my_field": "Beck" <3> + } + } +} +-------------------------- + +<1> As well as tokenising the plain text into single words e.g. `beck`, here we +inject the single token value `Beck` at the same position as `beck` in the token stream. +<2> Note annotations can inject multiple tokens at the same position - here we inject both +the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables +broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`. +<3> A benefit of searching with these carefully defined annotation tokens is that a query for +`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck` + +WARNING: Any use of `=` signs in annotation values eg `[Prince](person=Prince)` will +cause the document to be rejected with a parse failure. In future we hope to have a use for +the equals signs so will actively reject documents that contain this today. + + +[[mapper-annotated-text-tips]] +==== Data modelling tips +===== Use structured and unstructured fields + +Annotations are normally a way of weaving structured information into unstructured text for +higher-precision search. + +`Entity resolution` is a form of document enrichment undertaken by specialist software or people +where references to entities in a document are disambiguated by attaching a canonical ID. +The ID is used to resolve any number of aliases or distinguish between people with the +same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved +entity IDs woven into text.
+ +These IDs can be embedded as annotations in an annotated_text field but it often makes +sense to include them in dedicated structured fields to support discovery via aggregations: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" : { + "type": "keyword" + } + } + } + } + } +} +-------------------------- + +Applications would then typically provide content and discover it as follows: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch", + "my_twitter_handles": ["@kimchy"] <1> +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "elasticsearch OR logstash OR kibana",<2> + "default_field": "my_unstructured_text_field" + } + }, + "aggregations": { + \t"top_people" :{ + \t "significant_terms" : { <3> +\t "field" : "my_twitter_handles.keyword" + \t } + \t} + } +} +-------------------------- + +<1> Note the `my_twitter_handles` contains a list of the annotation values +also used in the unstructured text. (Note the annotated_text syntax requires escaping). +By repeating the annotation values in a structured field this application has ensured that +the tokens discovered in the structured field can be used for search and highlighting +in the unstructured field. +<2> In this example we search for documents that talk about components of the elastic stack +<3> We use the `my_twitter_handles` field here to discover people who are significantly +associated with the elastic stack. + +===== Avoiding over-matching annotations +By design, the regular text tokens and the annotation tokens co-exist in the same indexed +field but in rare cases this can lead to some over-matching. + +The value of an annotation often denotes a _named entity_ (a person, place or company). +The tokens for these named entities are inserted untokenized, and differ from typical text +tokens because they are normally: + +* Mixed case e.g. `Madonna` +* Multiple words e.g. `Jeff Beck` +* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy` + +This means, for the most part, a search for a named entity in the annotated text field will +not have any false positives e.g. when selecting `Apple Inc.` from an aggregation result +you can drill down to highlight uses in the text without "over matching" on any text tokens +like the word `apple` in this context: + + the apple was very juicy + +However, a problem arises if your named entity happens to be a single term and lower-case e.g. the +company `elastic`. In this case, a search on the annotated text field for the token `elastic` +may match a text document such as this: + + they fired an elastic band + +To avoid such false matches users should consider prefixing annotation values to ensure +they don't name clash with text tokens e.g. 
+ + [elastic](Company_elastic) released version 7.0 of the elastic stack today + + + + +[[mapper-annotated-text-highlighter]] +==== Using the `annotated` highlighter + +The `annotated-text` plugin includes a custom highlighter designed to mark up search hits +in a way which is respectful of the original markup: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "The cat sat on the [mat](sku3578)" +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "cats" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "annotated", <1> + "require_field_match": false + } + } + } +} +-------------------------- + +<1> The `annotated` highlighter type is designed for use with annotated_text fields + +The annotated highlighter is based on the `unified` highlighter and supports the same +settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using +html-like markup such as `cat` the annotated highlighter uses the same +markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term` +is the key and the matched search term is the value e.g. + + The [cat](_hit_term=cat) sat on the [mat](sku3578) + +The annotated highlighter tries to be respectful of any existing markup in the original +text: + +* If the search term matches exactly the location of an existing annotation then the +`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the +existing annotation. +* However, if the search term overlaps the span of an existing annotation it would break +the markup formatting so the original annotation is removed in favour of a new annotation +with just the search hit information in the results. 
+* Any non-overlapping annotations in the original text are preserved in highlighter +selections + + +[[mapper-annotated-text-limitations]] +==== Limitations + +The annotated_text field type supports the same mapping settings as the `text` field type +but with the following exceptions: + +* No support for `fielddata` or `fielddata_frequency_filter` +* No support for `index_prefixes` or `index_phrases` indexing + +""" + ), substitutions + ) + then: + snippets*.test == [false, false, false, false, false, false, false] + snippets*.catchPart == [null, null, null, null, null, null, null] + } + + def "handling test parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.catchPart == ["/painless_explain_error/"] + substitutions.size() == 1 + substitutions[0].key == "_explain\\/1" + substitutions[0].value == "_explain\\/1?error_trace=false" + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ + +[source,console] +---- +PUT _snapshot/my_hdfs_repository +{ + "type": "hdfs", + "settings": { + "uri": "hdfs://namenode:8020/", + "path": "elasticsearch/repositories/my_hdfs_repository", + "conf.dfs.client.read.shortcircuit": "true" + } +} +---- +// TEST[skip:we don't have hdfs set up while testing this] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.skip == ["we don't have hdfs set up while testing this"] + } + + def "handling testresponse parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 1 + substitutions[0].key == "\\.\\.\\." 
+ substitutions[0].value == + "\"script_stack\": \$body.error.caused_by.script_stack, \"script\": \$body.error.caused_by.script, \"lang\": \$body.error.caused_by.lang, \"position\": \$body.error.caused_by.position, \"caused_by\": \$body.error.caused_by.caused_by, \"reason\": \$body.error.caused_by.reason" + + when: + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[skip:no setup made for this example yet] +""" + ), [] + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + snippets*.skip == ["no setup made for this example yet"] + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,txt] +--------------------------------------------------------------------------- +my-index-000001 0 p RELOCATING 3014 31.1mb 192.168.56.10 H5dfFeA -> -> 192.168.56.30 bGG90GE +--------------------------------------------------------------------------- +// TESTRESPONSE[non_json] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 4 + } + + + def "handling console parsing"() { + when: + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- + +// $firstToken +---- +""" + ), [] + ) + then: + snippets*.console == [firstToken.equals("CONSOLE")] + + + when: + task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +// $firstToken +// $secondToken +---- +""" + ), [] + ) + then: + def e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:4: Can't be both CONSOLE and NOTCONSOLE" + + when: + task().parseDocFile( + tempDir, docFile( + """ +// $firstToken +// $secondToken +""" + ), [] + ) + then: + e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:1: $firstToken not paired with a snippet" + + where: + firstToken << ["CONSOLE", "NOTCONSOLE"] + secondToken << ["NOTCONSOLE", "CONSOLE"] + } + + def "test parsing snippet from doc"() { + def doc = docFile( + """ +[source,console] +---- +GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets*.start == [3] + snippets*.language == ["console"] + snippets*.contents == ["""GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +} +"""] + } + + def "test parsing snippet from doc2"() { + given: + def doc = docFile( + """ +[role="xpack"] +[[ml-update-snapshot]] += Update model snapshots API +++++ +Update model snapshots +++++ + +Updates certain properties of a snapshot. + +[[ml-update-snapshot-request]] +== {api-request-title} + +`POST _ml/anomaly_detectors//model_snapshots//_update` + +[[ml-update-snapshot-prereqs]] +== {api-prereq-title} + +Requires the `manage_ml` cluster privilege. This privilege is included in the +`machine_learning_admin` built-in role. 
+ +[[ml-update-snapshot-path-parms]] +== {api-path-parms-title} + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + +[[ml-update-snapshot-request-body]] +== {api-request-body-title} + +The following properties can be updated after the model snapshot is created: + +`description`:: +(Optional, string) A description of the model snapshot. + +`retain`:: +(Optional, Boolean) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] + + +[[ml-update-snapshot-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +-------------------------------------------------- +// TEST[skip:todo] + +When the snapshot is updated, you receive the following results: +[source,js] +---- +{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets*.start == [50, 62] + snippets*.language == ["console", "js"] + snippets*.contents == ["""POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +""", """{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +"""] + } + + + File docFile(String docContent) { + def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() + file.text = docContent + return file + } + + + private DocSnippetTask task() { + ProjectBuilder.builder().build().tasks.register("docSnippetTask", DocSnippetTask).get() + } + +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..6ef4726e1578a --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -0,0 +1,839 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck + +class RestTestsFromDocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir; + + def "test simple block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"") == "\"foo\": \"bort baz\"" + } + + def "test multiple block quotes"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") == "\"foo\": \"bort baz\", \"bar\": \"other\"" + } + + def "test escaping in block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"") == "\"foo\": \"bort\\\" baz\"" + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"") == "\"foo\": \"bort\\n baz\"" + } + + def "test invalid block quotes"() { + given: + String input = "\"foo\": \"\"\"bar\""; + when: + RestTestsFromDocSnippetTask.replaceBlockQuote(input); + then: + def e = thrown(InvalidUserDataException) + e.message == "Invalid block quote starting at 7 in:\n" + input + } + + def "test is doc write request"() { + expect: + shouldAddShardFailureCheck("doc-index/_search") == true + shouldAddShardFailureCheck("_cat") == false + shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview") == false + } + + def "can create rest tests from docs"() { + def build = ProjectBuilder.builder().build() + given: + def task = build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) +// def task = build.tasks.create("restTestFromSnippet", RestTestsFromSnippetsTask) + task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] +// + docs() + task.docs = build.fileTree(new File(tempDir, "docs")) + task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); + + when: + task.getActions().forEach { it.execute(task) } + def restSpec = new File(task.getTestRoot().get().getAsFile(), "rest-api-spec/test/painless-debugging.yml") + + then: + restSpec.exists() + restSpec.text == """--- +"line_22": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "hockey/_doc/1" + refresh: "" + body: | + {"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + - is_false: _shards.failures + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_explain/1" + error_trace: "false" + body: | + { + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } + } + - is_false: _shards.failures + - match: + \$body: + { + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + "script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason + }, + "status": 400 + } + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_update/1" + error_trace: "false" + body: | + { + 
"script": "Debug.explain(ctx._source)" + } + - is_false: _shards.failures + - match: + \$body: + { + "error" : { + "root_cause": \$body.error.root_cause, + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": \$body.error.caused_by.to_string, + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + "script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason + } + }, + "status": 400 + } +""" + def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") + restSpec2.exists() + restSpec2.text == """--- +"line_50": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - always_skip + reason: todo + - do: + raw: + method: POST + path: "_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update" + body: | + { + "description": "Snapshot 1", + "retain": true + } + - is_false: _shards.failures +""" + def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") + restSpec3.exists() + restSpec3.text == """--- +"line_10": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "library/_bulk" + refresh: "" + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + - is_false: _shards.failures + - do: + raw: + method: POST + path: "_sql" + format: "txt" + body: | + { + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" + } + - is_false: _shards.failures + - match: + \$body: + / \\s+author \\s+\\| \\s+name \\s+\\| \\s+page_count \\s+\\| \\s+release_date\\s* + ---------------\\+---------------\\+---------------\\+------------------------\\s* + Dan \\s+Simmons \\s+\\|Hyperion \\s+\\|482 \\s+\\|1989-05-26T00:00:00.000Z\\s* + Frank \\s+Herbert \\s+\\|Dune \\s+\\|604 \\s+\\|1965-06-01T00:00:00.000Z\\s*/ +""" + + def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") + restSpec4.exists() + restSpec4.text == """--- +"line_51": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_director" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_114": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_admin_role" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + 
], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_143": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_analyst_role" + refresh: "true" + body: | + { + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_170": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/admin_user" + refresh: "true" + body: | + { + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} + } + - is_false: _shards.failures +--- +"line_184": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/analyst_user" + refresh: "true" + body: | + { + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} + } + - is_false: _shards.failures +""" +} + + File docFile(String fileName, String docContent) { + def file = tempDir.toPath().resolve(fileName).toFile() + file.parentFile.mkdirs() + file.text = docContent + return file + } + + + void docs() { + docFile( + "docs/reference/sql/getting-started.asciidoc", """ +[role="xpack"] +[[sql-getting-started]] +== Getting Started with SQL + +To start using {es-sql}, create +an index with some data to experiment with: + +[source,console] +-------------------------------------------------- +PUT /library/_bulk?refresh +{"index":{"_id": "Leviathan Wakes"}} +{"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} +{"index":{"_id": "Hyperion"}} +{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} +{"index":{"_id": "Dune"}} +{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} +-------------------------------------------------- + +And now you can execute SQL using the <>: + +[source,console] +-------------------------------------------------- +POST /_sql?format=txt +{ + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" +} +-------------------------------------------------- +// TEST[continued] + +Which should return something along the lines of: + +[source,text] +-------------------------------------------------- + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +// TESTRESPONSE[s/\\|/\\\\|/ s/\\+/\\\\+/] +// TESTRESPONSE[non_json] + +You can also use the <>. 
There is a script to start it
+shipped in x-pack's bin directory:
+
+[source,bash]
+--------------------------------------------------
+\$ ./bin/elasticsearch-sql-cli
+--------------------------------------------------
+
+From there you can run the same query:
+
+[source,sqlcli]
+--------------------------------------------------
+sql> SELECT * FROM library WHERE release_date < '2000-01-01';
+    author     |     name      |  page_count   |      release_date
+---------------+---------------+---------------+------------------------
+Dan Simmons    |Hyperion       |482            |1989-05-26T00:00:00.000Z
+Frank Herbert  |Dune           |604            |1965-06-01T00:00:00.000Z
+--------------------------------------------------
+"""
+        )
+        docFile(
+            "docs/ml-update-snapshot.asciidoc",
+            """
+[role="xpack"]
+[[ml-update-snapshot]]
+= Update model snapshots API
+++++
+Update model snapshots
+++++
+
+Updates certain properties of a snapshot.
+
+[[ml-update-snapshot-request]]
+== {api-request-title}
+
+`POST _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_update`
+
+[[ml-update-snapshot-prereqs]]
+== {api-prereq-title}
+
+Requires the `manage_ml` cluster privilege. This privilege is included in the
+`machine_learning_admin` built-in role.
+
+[[ml-update-snapshot-path-parms]]
+== {api-path-parms-title}
+
+`<job_id>`::
+(Required, string)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+
+`<snapshot_id>`::
+(Required, string)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id]
+
+[[ml-update-snapshot-request-body]]
+== {api-request-body-title}
+
+The following properties can be updated after the model snapshot is created:
+
+`description`::
+(Optional, string) A description of the model snapshot.
+
+`retain`::
+(Optional, Boolean)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain]
+
+
+[[ml-update-snapshot-example]]
+== {api-examples-title}
+
+[source,console]
+--------------------------------------------------
+POST
+_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update
+{
+  "description": "Snapshot 1",
+  "retain": true
+}
+--------------------------------------------------
+// TEST[skip:todo]
+
+When the snapshot is updated, you receive the following results:
+[source,js]
+----
+{
+  "acknowledged": true,
+  "model": {
+    "job_id": "it_ops_new_logs",
+    "timestamp": 1491852978000,
+    "description": "Snapshot 1",
+...
+    "retain": true
+  }
+}
+----
+
+"""
+        )
+
+        docFile(
+            "docs/painless-debugging.asciidoc",
+            """
+
+[[painless-debugging]]
+=== Painless Debugging
+
+==== Debug.Explain
+
+Painless doesn't have a
+{wikipedia}/Read%E2%80%93eval%E2%80%93print_loop[REPL]
+and while it'd be nice for it to have one day, it wouldn't tell you the
+whole story around debugging painless scripts embedded in Elasticsearch because
+the data that the scripts have access to or "context" is so important. For now
+the best way to debug embedded scripts is by throwing exceptions at choice
+places. While you can throw your own exceptions
+(`throw new Exception('whatever')`), Painless's sandbox prevents you from
+accessing useful information like the type of an object. So Painless has a
+utility method, `Debug.explain`, which throws the exception for you. For
+example, you can use {ref}/search-explain.html[`_explain`] to explore the
+context available to a {ref}/query-dsl-script-query.html[script query].
+
+[source,console]
+---------------------------------------------------------
+PUT /hockey/_doc/1?refresh
+{"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]}
+
+POST /hockey/_explain/1
+{
+  "query": {
+    "script": {
+      "script": "Debug.explain(doc.goals)"
+    }
+  }
+}
+---------------------------------------------------------
+// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/]
+// The test system sends error_trace=true by default for easier debugging so
+// we have to override it to get a normal shaped response
+
+Which shows that the class of `doc.goals` is
+`org.elasticsearch.index.fielddata.ScriptDocValues.Longs` by responding with:
+
+[source,console-result]
+---------------------------------------------------------
+{
+  "error": {
+    "type": "script_exception",
+    "to_string": "[1, 9, 27]",
+    "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs",
+    "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs",
+    ...
+  },
+  "status": 400
+}
+---------------------------------------------------------
+// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason/]
+
+You can use the same trick to see that `_source` is a `LinkedHashMap`
+in the `_update` API:
+
+[source,console]
+---------------------------------------------------------
+POST /hockey/_update/1
+{
+  "script": "Debug.explain(ctx._source)"
+}
+---------------------------------------------------------
+// TEST[continued s/_update\\/1/_update\\/1?error_trace=false/ catch:/painless_explain_error/]
+
+The response looks like:
+
+[source,console-result]
+---------------------------------------------------------
+{
+  "error" : {
+    "root_cause": ...,
+    "type": "illegal_argument_exception",
+    "reason": "failed to execute script",
+    "caused_by": {
+      "type": "script_exception",
+      "to_string": "{gp=[26, 82, 1], last=gaudreau, assists=[17, 46, 0], first=johnny, goals=[9, 27, 1]}",
+      "painless_class": "java.util.LinkedHashMap",
+      "java_class": "java.util.LinkedHashMap",
+      ...
+    }
+  },
+  "status": 400
+}
+---------------------------------------------------------
+// TESTRESPONSE[s/"root_cause": \\.\\.\\./"root_cause": \$body.error.root_cause/]
+// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/]
+// TESTRESPONSE[s/"to_string": ".+"/"to_string": \$body.error.caused_by.to_string/]
+
+Once you have a class you can go to <> to see a list of
+available methods.
+
+"""
+        )
+        docFile(
+            "docs/reference/security/authorization/run-as-privilege.asciidoc",
+            """[role="xpack"]
+[[run-as-privilege]]
+= Submitting requests on behalf of other users
+
+{es} roles support a `run_as` privilege that enables an authenticated user to
+submit requests on behalf of other users. For example, if your external
+application is trusted to authenticate users, {es} can authenticate the external
+application and use the _run as_ mechanism to issue authorized requests as
+other users without having to re-authenticate each user.
+
+To "run as" (impersonate) another user, the first user (the authenticating user)
+must be authenticated by a mechanism that supports run-as delegation. The second
+user (the `run_as` user) must be authorized by a mechanism that supports
+delegated run-as lookups by username.
+
+The `run_as` privilege essentially operates like a secondary form of
+<>. Delegated authorization applies
+to the authenticating user, and the `run_as` privilege applies to the user who
+is being impersonated.
+
+Authenticating user::
+--
+For the authenticating user, the following realms (plus API keys) all support
+`run_as` delegation: `native`, `file`, Active Directory, JWT, Kerberos, LDAP and
+PKI.
+
+Service tokens, the {es} Token Service, SAML 2.0, and OIDC 1.0 do not
+support `run_as` delegation.
+--
+
+`run_as` user::
+--
+{es} supports `run_as` for any realm that supports user lookup.
+Not all realms support user lookup. Refer to the list of <>
+and ensure that the realm you wish to use is configured in a manner that
+supports user lookup.
+
+The `run_as` user must be retrieved from a <> - it is not
+possible to run as a
+<>,
+<> or
+<>.
+--
+
+To submit requests on behalf of other users, you need to have the `run_as`
+privilege in your <>. For example, the following request
+creates a `my_director` role that grants permission to submit requests on behalf
+of `jacknich` or `rdeniro`:
+
+[source,console]
+----
+POST /_security/role/my_director?refresh=true
+{
+  "cluster": ["manage"],
+  "indices": [
+    {
+      "names": [ "index1", "index2" ],
+      "privileges": [ "manage" ]
+    }
+  ],
+  "run_as": [ "jacknich", "rdeniro" ],
+  "metadata" : {
+    "version" : 1
+  }
+}
+----
+
+To submit a request as another user, you specify the user in the
+`es-security-runas-user` request header. For example:
+
+[source,sh]
+----
+curl -H "es-security-runas-user: jacknich" -u es-admin -X GET http://localhost:9200/
+----
+
+The `run_as` user passed in through the `es-security-runas-user` header must be
+available from a realm that supports delegated authorization lookup by username.
+Realms that don't support user lookup can't be used by `run_as` delegation from
+other realms.
+
+For example, JWT realms can authenticate external users specified in JWTs, and
+execute requests as a `run_as` user in the `native` realm. {es} will retrieve the
+indicated `run_as` user and execute the request as that user using their roles.
+
+[[run-as-privilege-apply]]
+== Apply the `run_as` privilege to roles
+You can apply the `run_as` privilege when creating roles with the
+<>. Users who are assigned
+a role that contains the `run_as` privilege inherit all privileges from their
+role, and can also submit requests on behalf of the indicated users.
+
+NOTE: Roles for the authenticated user and the `run_as` user are not merged. If
+a user authenticates without specifying the `run_as` parameter, only the
+authenticated user's roles are used. If a user authenticates and their roles
+include the `run_as` parameter, only the `run_as` user's roles are used.
+
+After a user successfully authenticates to {es}, an authorization process determines whether the user behind an incoming request is allowed to run
+that request. If the authenticated user has the `run_as` privilege in their list
+of permissions and specifies the run-as header, {es} _discards_ the authenticated
+user and associated roles.
It then looks in each of the configured realms in the +realm chain until it finds the username that's associated with the `run_as` user, +and uses those roles to execute any requests. + +Consider an admin role and an analyst role. The admin role has higher privileges, +but might also want to submit requests as another user to test and verify their +permissions. + +First, we'll create an admin role named `my_admin_role`. This role has `manage` +<> on the entire cluster, and on a subset of +indices. This role also contains the `run_as` privilege, which enables any user +with this role to submit requests on behalf of the specified `analyst_user`. + +[source,console] +---- +POST /_security/role/my_admin_role?refresh=true +{ + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } +} +---- + +Next, we'll create an analyst role named `my_analyst_role`, which has more +restricted `monitor` cluster privileges and `manage` privileges on a subset of +indices. + +[source,console] +---- +POST /_security/role/my_analyst_role?refresh=true +{ + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } +} +---- + +We'll create an administrator user and assign them the role named `my_admin_role`, +which allows this user to submit requests as the `analyst_user`. + +[source,console] +---- +POST /_security/user/admin_user?refresh=true +{ + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} +} +---- + +We can also create an analyst user and assign them the role named +`my_analyst_role`. + +[source,console] +---- +POST /_security/user/analyst_user?refresh=true +{ + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} +} +---- + +You can then authenticate to {es} as the `admin_user` or `analyst_user`. However, the `admin_user` could optionally submit requests on +behalf of the `analyst_user`. The following request authenticates to {es} with a +`Basic` authorization token and submits the request as the `analyst_user`: + +[source,sh] +---- +curl -s -X GET -H "Authorization: Basic YWRtaW5fdXNlcjpsMG5nLXI0bmQwbS1wQHNzdzByZA==" -H "es-security-runas-user: analyst_user" https://localhost:9200/_security/_authenticate +---- + +The response indicates that the `analyst_user` submitted this request, using the +`my_analyst_role` that's assigned to that user. When the `admin_user` submitted +the request, {es} authenticated that user, discarded their roles, and then used +the roles of the `run_as` user. + +[source,sh] +---- +{"username":"analyst_user","roles":["my_analyst_role"],"full_name":"Monday Jaffe","email":null, +"metadata":{"innovation":8},"enabled":true,"authentication_realm":{"name":"native", +"type":"native"},"lookup_realm":{"name":"native","type":"native"},"authentication_type":"realm"} +% +---- + +The `authentication_realm` and `lookup_realm` in the response both specify +the `native` realm because both the `admin_user` and `analyst_user` are from +that realm. 
If the two users are in different realms, the values for +`authentication_realm` and `lookup_realm` are different (such as `pki` and +`native`). +""" + ) + + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java deleted file mode 100644 index 534134e78d40b..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc; - -import org.gradle.api.InvalidUserDataException; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.elasticsearch.gradle.internal.doc.RestTestsFromSnippetsTask.replaceBlockQuote; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class RestTestFromSnippetsTaskTests { - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @Test - public void testInvalidBlockQuote() { - String input = "\"foo\": \"\"\"bar\""; - expectedEx.expect(InvalidUserDataException.class); - expectedEx.expectMessage("Invalid block quote starting at 7 in:\n" + input); - replaceBlockQuote(input); - } - - @Test - public void testSimpleBlockQuote() { - assertEquals("\"foo\": \"bort baz\"", replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); - } - - @Test - public void testMultipleBlockQuotes() { - assertEquals( - "\"foo\": \"bort baz\", \"bar\": \"other\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") - ); - } - - @Test - public void testEscapingInBlockQuote() { - assertEquals("\"foo\": \"bort\\\" baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); - assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); - } - - @Test - public void testIsDocWriteRequest() { - assertTrue((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("doc-index/_search")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_cat")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview")); - } -} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java deleted file mode 100644 index 0acae6ca03297..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.gradle.internal.doc; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class SnippetsTaskTests { - - @Test - public void testMatchSource() { - SnippetsTask.Source source = SnippetsTask.matchSource("[source,console]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertNull(source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source, console, id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source.merge.styled,esql]"); - assertTrue(source.getMatches()); - assertEquals("esql", source.getLanguage()); - - source = SnippetsTask.matchSource("[source.merge.styled,foo-bar]"); - assertTrue(source.getMatches()); - assertEquals("foo-bar", source.getLanguage()); - } -} From d6f9d1e69e0335f2fb27f55d60256af8dde9a4e1 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Apr 2024 12:21:08 +0200 Subject: [PATCH 215/264] ESQL: Rename AUTO_BUCKET to just BUCKET (#107197) This renames the function AUTO_BUCKET to just BUCKET. It also removes the experimental tagging of the function in the docs, making it generally available. 
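
For illustration, here is the effect of the rename on a query, adapted from the
docsBucketMonth example in date.csv-spec below (BUCKET is a drop-in replacement;
only the function name changes):

    FROM employees
    | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
    | EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")

The same query previously spelled the function as
AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z").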
--- docs/reference/esql/esql-get-started.asciidoc | 10 +- .../{auto_bucket.asciidoc => bucket.asciidoc} | 44 +++--- .../functions/date-time-functions.asciidoc | 4 +- .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../functions/layout/auto_bucket.asciidoc | 14 -- .../esql/functions/layout/bucket.asciidoc | 14 ++ .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../esql/functions/signature/auto_bucket.svg | 1 - .../esql/functions/signature/bucket.svg | 1 + .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../src/main/resources/date.csv-spec | 134 +++++++++--------- .../src/main/resources/floats.csv-spec | 4 +- .../src/main/resources/ints.csv-spec | 24 ++-- .../src/main/resources/meta.csv-spec | 8 +- .../src/main/resources/unsigned_long.csv-spec | 4 +- .../function/EsqlFunctionRegistry.java | 4 +- .../math/{AutoBucket.java => Bucket.java} | 12 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 10 +- ...{AutoBucketTests.java => BucketTests.java} | 6 +- .../optimizer/LogicalPlanOptimizerTests.java | 10 +- .../session/IndexResolverFieldNamesTests.java | 12 +- 21 files changed, 157 insertions(+), 159 deletions(-) rename docs/reference/esql/functions/{auto_bucket.asciidoc => bucket.asciidoc} (62%) rename docs/reference/esql/functions/description/{auto_bucket.asciidoc => bucket.asciidoc} (100%) delete mode 100644 docs/reference/esql/functions/layout/auto_bucket.asciidoc create mode 100644 docs/reference/esql/functions/layout/bucket.asciidoc rename docs/reference/esql/functions/parameters/{auto_bucket.asciidoc => bucket.asciidoc} (100%) delete mode 100644 docs/reference/esql/functions/signature/auto_bucket.svg create mode 100644 docs/reference/esql/functions/signature/bucket.svg rename docs/reference/esql/functions/types/{auto_bucket.asciidoc => bucket.asciidoc} (100%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/{AutoBucket.java => Bucket.java} (95%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/{AutoBucketTests.java => BucketTests.java} (96%) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 29f61299cec30..421272f741602 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -240,7 +240,7 @@ include::{esql-specs}/eval.csv-spec[tag=gs-eval-stats-backticks] === Create a histogram To track statistics over time, {esql} enables you to create histograms using the -<> function. `AUTO_BUCKET` creates human-friendly bucket sizes +<> function. `BUCKET` creates human-friendly bucket sizes and returns a value for each row that corresponds to the resulting bucket the row falls into. @@ -248,22 +248,22 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] +include::{esql-specs}/date.csv-spec[tag=gs-bucket] ---- -Combine `AUTO_BUCKET` with <> to create a histogram. For example, +Combine `BUCKET` with <> to create a histogram. 
For example, to count the number of events per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by-median] ---- [discrete] diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/bucket.asciidoc similarity index 62% rename from docs/reference/esql/functions/auto_bucket.asciidoc rename to docs/reference/esql/functions/bucket.asciidoc index 651ac168aa83a..e436a79d0ec1e 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/bucket.asciidoc @@ -1,14 +1,12 @@ [discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -experimental::[] +[[esql-bucket]] +=== `BUCKET` *Syntax* [source,esql] ---- -AUTO_BUCKET(expression, buckets, from, to) +BUCKET(expression, buckets, from, to) ---- *Parameters* @@ -28,39 +26,39 @@ End of the range. Can be a number or a date expressed as a string. *Description* Creates human-friendly buckets and returns a value for each row that corresponds -to the resulting bucket the row falls into. +to the resulting bucket the row falls into. Using a target number of buckets, a start of a range, and an end of a range, -`AUTO_BUCKET` picks an appropriate bucket size to generate the target number of +`BUCKET` picks an appropriate bucket size to generate the target number of buckets or fewer. For example, asking for at most 20 buckets over a year results in monthly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth-result] |=== The goal isn't to provide *exactly* the target number of buckets, it's to pick a range that people are comfortable with that provides at most the target number of buckets. -Combine `AUTO_BUCKET` with +Combine `BUCKET` with <> to create a histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents. +NOTE: `BUCKET` does not create buckets that don't match any documents. That's why this example is missing `1985-03-01` and other dates. Asking for more buckets can result in a smaller range. For example, asking for @@ -68,28 +66,28 @@ at most 100 buckets in a year results in weekly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not filter any rows. 
It only uses the provided range to
+NOTE: `BUCKET` does not filter any rows. It only uses the provided range to
pick a good bucket size. For rows with a value outside of the range, it returns
a bucket value that corresponds to a bucket outside the range. Combine
-`AUTO_BUCKET` with <<esql-where>> to filter rows.
+`BUCKET` with <<esql-where>> to filter rows.

-`AUTO_BUCKET` can also operate on numeric fields. For example, to create a
+`BUCKET` can also operate on numeric fields. For example, to create a
salary histogram:

[source.merge.styled,esql]
----
-include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric]
+include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
-include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric-result]
+include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric-result]
|===

Unlike the earlier example that intentionally filters on a date range, you
@@ -104,7 +102,7 @@ per hour:

[source.styled,esql]
----
-include::{esql-specs}/date.csv-spec[tag=docsAutoBucketLast24hr]
+include::{esql-specs}/date.csv-spec[tag=docsBucketLast24hr]
----

Create monthly buckets for the year 1985, and calculate the average salary by
@@ -112,9 +110,9 @@ hiring month:

[source.merge.styled,esql]
----
-include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg]
+include::{esql-specs}/date.csv-spec[tag=bucket_in_agg]
----
[%header.monospaced.styled,format=dsv,separator=|]
|===
-include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result]
+include::{esql-specs}/date.csv-spec[tag=bucket_in_agg-result]
|===
diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc
index c1cd36e376a1c..149bdffb5ef07 100644
--- a/docs/reference/esql/functions/date-time-functions.asciidoc
+++ b/docs/reference/esql/functions/date-time-functions.asciidoc
@@ -8,7 +8,7 @@
{esql} supports these date-time functions:

// tag::date_list[]
-* experimental:[] <<esql-auto_bucket>>
+* <<esql-bucket>>
* <<esql-date_diff>>
* <<esql-date_extract>>
* <<esql-date_format>>
@@ -17,7 +17,7 @@
* <<esql-now>>
// end::date_list[]

-include::auto_bucket.asciidoc[]
+include::bucket.asciidoc[]
include::date_diff.asciidoc[]
include::date_extract.asciidoc[]
include::date_format.asciidoc[]
diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/bucket.asciidoc
similarity index 100%
rename from docs/reference/esql/functions/description/auto_bucket.asciidoc
rename to docs/reference/esql/functions/description/bucket.asciidoc
diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc
deleted file mode 100644
index 82e05ab5d215c..0000000000000
--- a/docs/reference/esql/functions/layout/auto_bucket.asciidoc
+++ /dev/null
@@ -1,14 +0,0 @@
-// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
-
-[discrete]
-[[esql-auto_bucket]]
-=== `AUTO_BUCKET`
-
-*Syntax*
-
-[.text-center]
-image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline]
-
-include::../parameters/auto_bucket.asciidoc[]
-include::../description/auto_bucket.asciidoc[]
-include::../types/auto_bucket.asciidoc[]
diff --git a/docs/reference/esql/functions/layout/bucket.asciidoc b/docs/reference/esql/functions/layout/bucket.asciidoc
new file mode 100644
index 0000000000000..0445007237c8c
--- /dev/null
+++ b/docs/reference/esql/functions/layout/bucket.asciidoc
@@ -0,0 +1,14 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it.
See ../README.md for how to regenerate it. + +[discrete] +[[esql-bucket]] +=== `BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/bucket.svg[Embedded,opts=inline] + +include::../parameters/bucket.asciidoc[] +include::../description/bucket.asciidoc[] +include::../types/bucket.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/parameters/auto_bucket.asciidoc rename to docs/reference/esql/functions/parameters/bucket.asciidoc diff --git a/docs/reference/esql/functions/signature/auto_bucket.svg b/docs/reference/esql/functions/signature/auto_bucket.svg deleted file mode 100644 index 7da9a053825f1..0000000000000 --- a/docs/reference/esql/functions/signature/auto_bucket.svg +++ /dev/null @@ -1 +0,0 @@ -AUTO_BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/bucket.svg b/docs/reference/esql/functions/signature/bucket.svg new file mode 100644 index 0000000000000..f6662910c010d --- /dev/null +++ b/docs/reference/esql/functions/signature/bucket.svg @@ -0,0 +1 @@ +BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/types/auto_bucket.asciidoc rename to docs/reference/esql/functions/types/bucket.asciidoc diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8f9ce9968d89d..5b3b6235ccb8b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -261,36 +261,36 @@ int:integer |dt:date // end::to_datetime-int-result[] ; -autoBucketSimpleMonth -// tag::auto_bucket_month[] +bucketSimpleMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_month[] ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=AUTO_BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::auto_bucket_month[] +| EVAL bucket=BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_month[] ; -// tag::auto_bucket_month-result[] +// tag::bucket_month-result[] date:datetime | bucket:datetime 1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z -// end::auto_bucket_month-result[] +// end::bucket_month-result[] ; -autoBucketSimpleWeek -// tag::auto_bucket_week[] +bucketSimpleWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_week[] ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=AUTO_BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::auto_bucket_week[] +| EVAL bucket=BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_week[] ; -// tag::auto_bucket_week-result[] +// tag::bucket_week-result[] date:datetime | bucket:datetime 1985-07-09T00:00:00.000Z | 1985-07-08T00:00:00.000Z -// end::auto_bucket_week-result[] +// end::bucket_week-result[] ; -autoBucketMonth +bucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", 
"1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -308,10 +308,10 @@ hire_date:date | hd:date 1985-11-21T00:00:00.000Z | 1985-11-01T00:00:00.000Z ; -autoBucketWeek +bucketWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -350,10 +350,10 @@ from employees | where birth_date > now() | sort emp_no asc | keep emp_no, birth emp_no:integer | birth_date:date ; -autoBucketYearInAgg#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) +| EVAL bucket = BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) | STATS COUNT(*) by bucket | sort bucket; @@ -361,12 +361,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsString#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsString#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -374,12 +374,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsConcat#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsConcat#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -387,12 +387,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsDate#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsDate#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | keep bucket_start, bucket_end, bucket | STATS COUNT(*) by bucket | sort bucket; @@ -401,31 +401,31 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsRename#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsRename#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() | RENAME bucket_end as be, bucket_start as bs -| STATS c = COUNT(*) by AUTO_BUCKET(hire_date, 5, bs, be) +| STATS c = COUNT(*) by BUCKET(hire_date, 5, bs, be) | SORT c ; -c:long | AUTO_BUCKET(hire_date, 5, bs, be):date +c:long | BUCKET(hire_date, 5, bs, be):date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketMonthInAgg -// tag::auto_bucket_in_agg[] 
+bucketMonthInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_in_agg[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket -// end::auto_bucket_in_agg[] +// end::bucket_in_agg[] ; -// tag::auto_bucket_in_agg-result[] +// tag::bucket_in_agg-result[] AVG(salary):double | bucket:date 46305.0 | 1985-02-01T00:00:00.000Z 44817.0 | 1985-05-01T00:00:00.000Z @@ -433,7 +433,7 @@ AVG(salary):double | bucket:date 49095.0 | 1985-09-01T00:00:00.000Z 51532.0 | 1985-10-01T00:00:00.000Z 54539.75 | 1985-11-01T00:00:00.000Z -// end::auto_bucket_in_agg-result[] +// end::bucket_in_agg-result[] ; evalDateDiffInNanoAndMicroAndMilliSeconds#[skip:-8.12.99, reason:date_diff added in 8.13] @@ -950,17 +950,17 @@ birth_date:datetime 1953-04-21T00:00:00.000Z ; -docsAutoBucketMonth -//tag::docsAutoBucketMonth[] +docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonth[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | KEEP hire_date, month | SORT hire_date -//end::docsAutoBucketMonth[] +//end::docsBucketMonth[] ; -//tag::docsAutoBucketMonth-result[] +//tag::docsBucketMonth-result[] hire_date:date | month:date 1985-02-18T00:00:00.000Z|1985-02-01T00:00:00.000Z 1985-02-24T00:00:00.000Z|1985-02-01T00:00:00.000Z @@ -973,20 +973,20 @@ FROM employees 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-21T00:00:00.000Z|1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonth-result[] +//end::docsBucketMonth-result[] ; -docsAutoBucketMonthlyHistogram -//tag::docsAutoBucketMonthlyHistogram[] +docsBucketMonthlyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonthlyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_month = COUNT(*) BY month | SORT month -//end::docsAutoBucketMonthlyHistogram[] +//end::docsBucketMonthlyHistogram[] ; -//tag::docsAutoBucketMonthlyHistogram-result[] +//tag::docsBucketMonthlyHistogram-result[] hires_per_month:long | month:date 2 |1985-02-01T00:00:00.000Z 1 |1985-05-01T00:00:00.000Z @@ -994,20 +994,20 @@ FROM employees 1 |1985-09-01T00:00:00.000Z 2 |1985-10-01T00:00:00.000Z 4 |1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonthlyHistogram-result[] +//end::docsBucketMonthlyHistogram-result[] ; -docsAutoBucketWeeklyHistogram -//tag::docsAutoBucketWeeklyHistogram[] +docsBucketWeeklyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketWeeklyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL week = AUTO_BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL week = BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_week = COUNT(*) BY week | SORT week -//end::docsAutoBucketWeeklyHistogram[] 
+//end::docsBucketWeeklyHistogram[] ; -//tag::docsAutoBucketWeeklyHistogram-result[] +//tag::docsBucketWeeklyHistogram-result[] hires_per_week:long | week:date 2 |1985-02-18T00:00:00.000Z 1 |1985-05-13T00:00:00.000Z @@ -1015,40 +1015,40 @@ FROM employees 1 |1985-09-16T00:00:00.000Z 2 |1985-10-14T00:00:00.000Z 4 |1985-11-18T00:00:00.000Z -//end::docsAutoBucketWeeklyHistogram-result[] +//end::docsBucketWeeklyHistogram-result[] ; -docsAutoBucketLast24hr#[skip:-8.12.99, reason:date type is supported in 8.13] -//tag::docsAutoBucketLast24hr[] +docsBucketLast24hr#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketLast24hr[] FROM sample_data | WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() -| EVAL bucket = AUTO_BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) +| EVAL bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) | STATS COUNT(*) BY bucket -//end::docsAutoBucketLast24hr[] +//end::docsBucketLast24hr[] ; COUNT(*):long | bucket:date ; -docsGettingStartedAutoBucket#[skip:-8.12.99, reason:date type is supported in 8.13] -// tag::gs-auto_bucket[] +docsGettingStartedBucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket[] FROM sample_data | KEEP @timestamp -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) -// end::gs-auto_bucket[] +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) +// end::gs-bucket[] | LIMIT 0 ; @timestamp:date | bucket:date ; -docsGettingStartedAutoBucketStatsBy -// tag::gs-auto_bucket-stats-by[] +docsGettingStartedBucketStatsBy#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS COUNT(*) BY bucket -// end::gs-auto_bucket-stats-by[] +// end::gs-bucket-stats-by[] | SORT bucket ; @@ -1057,13 +1057,13 @@ COUNT(*):long | bucket:date 5 |2023-10-23T13:00:00.000Z ; -docsGettingStartedAutoBucketStatsByMedian -// tag::gs-auto_bucket-stats-by-median[] +docsGettingStartedBucketStatsByMedian#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by-median[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS median_duration = MEDIAN(event_duration) BY bucket -// end::gs-auto_bucket-stats-by-median[] +// end::gs-bucket-stats-by-median[] | SORT bucket ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 0882fec5ec0bf..8f8f218fd9821 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -257,10 +257,10 @@ emp_no:integer | salary_change:double | a1:double 10005 | [-2.14,13.07] | [-2.14,13.07] ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bh = auto_bucket(height, 20, 1.41, 2.10) +| EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date, height | KEEP hire_date, height, bh ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 3e1d1b19a7f67..026e3d922d00d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -642,17 +642,17 @@ emp_no:integer | salary_change.long:long | a1:long 10005 | [-2, 13] | [-2, 13] ; -autoBucket -// tag::auto_bucket[] +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | SORT hire_date, salary | KEEP hire_date, salary, bs -// end::auto_bucket[] +// end::bucket[] ; -// tag::auto_bucket-result[] +// tag::bucket-result[] hire_date:date | salary:integer | bs:double 1985-02-18T00:00:00.000Z | 66174 | 65000.0 1985-02-24T00:00:00.000Z | 26436 | 25000.0 @@ -665,19 +665,19 @@ hire_date:date | salary:integer | bs:double 1985-11-20T00:00:00.000Z | 33956 | 30000.0 1985-11-20T00:00:00.000Z | 74999 | 70000.0 1985-11-21T00:00:00.000Z | 56371 | 55000.0 -// end::auto_bucket-result[] +// end::bucket-result[] ; -docsAutoBucketNumeric -//tag::docsAutoBucketNumeric[] +docsBucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketNumeric[] FROM employees -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | STATS COUNT(*) by bs | SORT bs -//end::docsAutoBucketNumeric[] +//end::docsBucketNumeric[] ; -//tag::docsAutoBucketNumeric-result[] +//tag::docsBucketNumeric-result[] COUNT(*):long | bs:double 9 |25000.0 9 |30000.0 @@ -689,7 +689,7 @@ FROM employees 9 |60000.0 8 |65000.0 8 |70000.0 -//end::docsAutoBucketNumeric-result[] +//end::docsBucketNumeric-result[] ; cos diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 492da4ee5ef36..d0e18426f03ab 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,8 +7,8 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "double avg(number:double|integer|long)" +"double|date bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" @@ -117,8 +117,8 @@ acos |number |"double|integer|long|unsigne asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. 
atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] avg |number |"double|integer|long" |[""] +bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[, CIDR block to test the IP against.] @@ -228,8 +228,8 @@ acos |Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. -auto_bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. avg |The average of a numeric field. +bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. 
@@ -340,8 +340,8 @@ acos |double asin |double |false |false |false atan |double |false |false |false atan2 |double |[false, false] |false |false -auto_bucket |"double|date" |[false, false, false, false]|false |false avg |double |false |false |true +bucket |"double|date" |[false, false, false, false]|false |false case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index f1a15f41af7b3..2bf9259478032 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -150,10 +150,10 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM ul_logs | WHERE @timestamp >= "2017-11-10T20:30:00Z" AND @timestamp < "2017-11-10T20:35:00Z" -| EVAL bh = auto_bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) +| EVAL bh = bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) | SORT @timestamp | KEEP @timestamp, bytes_in, bh ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index a1a7c95ece2f9..62688d753aeef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -48,7 +48,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -143,7 +143,7 @@ private FunctionDefinition[][] functions() { def(Asin.class, Asin::new, "asin"), def(Atan.class, Atan::new, "atan"), def(Atan2.class, Atan2::new, "atan2"), - def(AutoBucket.class, AutoBucket::new, "auto_bucket"), + def(Bucket.class, Bucket::new, "bucket"), def(Ceil.class, Ceil::new, "ceil"), def(Cos.class, Cos::new, "cos"), def(Cosh.class, Cosh::new, "cosh"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java similarity index 95% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java index ea581437f6c4f..b58a9bae08146 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java @@ -48,13 +48,13 @@ *

    * Takes a date field and three constants and picks a bucket size based on the * constants. The constants are "target bucket count", "from", and "to". It looks like: - * {@code auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. + * {@code bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. * We have a list of "human" bucket sizes like "one month" and "four hours". We pick * the largest range that covers the range in fewer than the target bucket count. So * in the above case we'll pick month long buckets, yielding 12 buckets. *

    */ -public class AutoBucket extends EsqlScalarFunction implements Validatable { +public class Bucket extends EsqlScalarFunction implements Validatable { // TODO maybe we should just cover the whole of representable dates here - like ten years, 100 years, 1000 years, all the way up. // That way you never end up with more than the target number of buckets. private static final Rounding LARGEST_HUMAN_DATE_ROUNDING = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).build(); @@ -86,7 +86,7 @@ public class AutoBucket extends EsqlScalarFunction implements Validatable { @FunctionInfo(returnType = { "double", "date" }, description = """ Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into.""") - public AutoBucket( + public Bucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, @@ -226,12 +226,12 @@ public DataType dataType() { @Override public Expression replaceChildren(List newChildren) { - return new AutoBucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + return new Bucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } @Override protected NodeInfo info() { - return NodeInfo.create(this, AutoBucket::new, field, buckets, from, to); + return NodeInfo.create(this, Bucket::new, field, buckets, from, to); } public Expression field() { @@ -252,6 +252,6 @@ public Expression to() { @Override public String toString() { - return "AutoBucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; + return "Bucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 27e3c95bd123a..a0fecd731c71c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -69,7 +69,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -378,7 +378,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Trim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), // ScalarFunction of(ScalarFunction.class, Atan2.class, PlanNamedTypes::writeAtan2, PlanNamedTypes::readAtan2), - of(ScalarFunction.class, AutoBucket.class, PlanNamedTypes::writeAutoBucket, PlanNamedTypes::readAutoBucket), + of(ScalarFunction.class, Bucket.class, PlanNamedTypes::writeBucket, PlanNamedTypes::readBucket), of(ScalarFunction.class, Case.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, CIDRMatch.class, 
PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), @@ -1395,11 +1395,11 @@ static void writeAtan2(PlanStreamOutput out, Atan2 atan2) throws IOException { out.writeExpression(atan2.x()); } - static AutoBucket readAutoBucket(PlanStreamInput in) throws IOException { - return new AutoBucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); + static Bucket readBucket(PlanStreamInput in) throws IOException { + return new Bucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); } - static void writeAutoBucket(PlanStreamOutput out, AutoBucket bucket) throws IOException { + static void writeBucket(PlanStreamOutput out, Bucket bucket) throws IOException { out.writeSource(bucket.source()); out.writeExpression(bucket.field()); out.writeExpression(bucket.buckets()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java similarity index 96% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index 9d8cf702a375a..23122863b95f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -28,8 +28,8 @@ import static org.hamcrest.Matchers.equalTo; -public class AutoBucketTests extends AbstractFunctionTestCase { - public AutoBucketTests(@Name("TestCase") Supplier testCaseSupplier) { +public class BucketTests extends AbstractFunctionTestCase { + public BucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -141,7 +141,7 @@ private static Matcher dateResultsMatcher(List args) { - return new AutoBucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); + return new Bucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index a60999baba9fe..7de3308fcab16 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3299,26 +3299,26 @@ public void testNestedExpressionsInStatsWithExpression() { assertThat(Expressions.names(fields), contains("languages + emp_no")); } - public void testLogicalPlanOptimizerVerifier() { + public void testBucketAcceptsEvalLiteralReferences() { var plan = plan(""" from test | eval bucket_start = 1, bucket_end = 100000 - | eval auto_bucket(salary, 10, bucket_start, bucket_end) + | eval bucket(salary, 10, bucket_start, bucket_end) """); var ab = as(plan, Eval.class); assertTrue(ab.optimized()); } - public void testLogicalPlanOptimizerVerificationException() { + public void testBucketFailsOnFieldArgument() { VerificationException e = 
expectThrows(VerificationException.class, () -> plan(""" from test | eval bucket_end = 100000 - | eval auto_bucket(salary, 10, emp_no, bucket_end) + | eval bucket(salary, 10, emp_no, bucket_end) """)); assertTrue(e.getMessage().startsWith("Found ")); final String header = "Found 1 problem\nline "; assertEquals( - "3:32: third argument of [auto_bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", + "3:27: third argument of [bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", e.getMessage().substring(header.length()) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 17ad5eb8b9f3d..ff6c60310fd87 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -212,11 +212,11 @@ public void testIn2() { | limit 4""", Set.of("hire_date", "hire_date.*", "birth_date", "birth_date.*")); } - public void testAutoBucketMonth() { + public void testBucketMonth() { assertFieldNames(""" from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" - | eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd""", Set.of("hire_date", "hire_date.*")); } @@ -228,11 +228,11 @@ public void testBorn_before_today() { ); } - public void testAutoBucketMonthInAgg() { + public void testBucketMonthInAgg() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket""", Set.of("salary", "salary.*", "hire_date", "hire_date.*")); } @@ -554,11 +554,11 @@ public void testConvertFromDatetime() { ); } - public void testAutoBucket() { + public void testBucket() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bh = auto_bucket(height, 20, 1.41, 2.10) + | EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date | KEEP hire_date, height, bh""", Set.of("hire_date", "hire_date.*", "height", "height.*")); } From ceeee1bf563db4115a197bee76784572d8ba040d Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Apr 2024 11:39:38 +0100 Subject: [PATCH 216/264] Recommend NFS over S3-like repositories (#107297) Allegedly-S3-compatible APIs are very popular these days, but many third-party systems offering such an API also support a shared filesystem interface. Shared filesystem protocols such as NFS are much better specified than the S3 API, and experience shows that they lead to fewer compatibility headaches. This commit adds a recommendation to the `repository-s3` docs to consider such an interface instead. 
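For readers wondering what the recommended alternative looks like in practice, switching from an S3-compatible endpoint to a shared filesystem repository is a single-request change. The sketch below is illustrative only, using the low-level REST client idiom that appears throughout this series: the repository name, the NFS mount path, and the `client` variable are assumptions, and the mount point must also be listed under `path.repo` on every node.

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

// Hypothetical helper: register a snapshot repository backed by a shared
// filesystem (e.g. an NFS mount) rather than an "S3-compatible" endpoint.
// Assumes /mnt/nfs/es_snapshots is listed under path.repo on every node.
void registerFsRepository(RestClient client) throws IOException {
    Request request = new Request("PUT", "/_snapshot/my_fs_repository");
    request.setJsonEntity("""
        {
          "type": "fs",
          "settings": { "location": "/mnt/nfs/es_snapshots" }
        }
        """);
    client.performRequest(request);
}
```

Once registered this way, the repository behaves like any other snapshot repository, which is what makes the switch low-risk compared with debugging a partially compatible S3 clone.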
--- .../snapshot-restore/repository-s3.asciidoc | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 0c79793ee6c5a..11324639cb2f0 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -541,13 +541,17 @@ MinIO-backed repositories as well as repositories stored on AWS S3. Other S3-compatible storage systems may also work with {es}, but these are not covered by the {es} test suite. -Note that some storage systems claim to be S3-compatible but do not faithfully -emulate S3's behaviour in full. The `repository-s3` type requires full -compatibility with S3. In particular it must support the same set of API -endpoints, return the same errors in case of failures, and offer consistency and -performance at least as good as S3 even when accessed concurrently by multiple -nodes. You will need to work with the supplier of your storage system to address -any incompatibilities you encounter. Please do not report {es} issues involving +There are many systems, including some from very well-known storage vendors, +which claim to offer an S3-compatible API despite failing to emulate S3's +behaviour in full. If you are using such a system for your snapshots, consider +using a <> based +on a standardized protocol such as NFS to access your storage system instead. +The `repository-s3` type requires full compatibility with S3. In particular it +must support the same set of API endpoints, with the same parameters, return +the same errors in case of failures, and offer consistency and performance at +least as good as S3 even when accessed concurrently by multiple nodes. You will +need to work with the supplier of your storage system to address any +incompatibilities you encounter. Please do not report {es} issues involving storage systems which claim to be S3-compatible unless you can demonstrate that the same issue exists when using a genuine AWS S3 repository. From 8bcbc971288bc14c9aeee9d0ea2424ebb55dc572 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Apr 2024 12:50:12 +0200 Subject: [PATCH 217/264] Rename generated docs for (renamed) BUCKET func (#107299) This checks in the generated-by-test doc files for newly renamed BUCKET function. --- .../kibana/definition/{auto_bucket.json => bucket.json} | 2 +- .../esql/functions/kibana/docs/{auto_bucket.md => bucket.md} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename docs/reference/esql/functions/kibana/definition/{auto_bucket.json => bucket.json} (99%) rename docs/reference/esql/functions/kibana/docs/{auto_bucket.md => bucket.md} (94%) diff --git a/docs/reference/esql/functions/kibana/definition/auto_bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json similarity index 99% rename from docs/reference/esql/functions/kibana/definition/auto_bucket.json rename to docs/reference/esql/functions/kibana/definition/bucket.json index 96940e5f051f2..dda3f384424b4 100644 --- a/docs/reference/esql/functions/kibana/definition/auto_bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1,7 +1,7 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", - "name" : "auto_bucket", + "name" : "bucket", "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", "signatures" : [ { diff --git a/docs/reference/esql/functions/kibana/docs/auto_bucket.md b/docs/reference/esql/functions/kibana/docs/bucket.md similarity index 94% rename from docs/reference/esql/functions/kibana/docs/auto_bucket.md rename to docs/reference/esql/functions/kibana/docs/bucket.md index df3999f968486..6ebfe7de5527d 100644 --- a/docs/reference/esql/functions/kibana/docs/auto_bucket.md +++ b/docs/reference/esql/functions/kibana/docs/bucket.md @@ -2,7 +2,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. --> -### AUTO_BUCKET +### BUCKET Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. From 84d61579c164df39a7cfb70be19775bd95a93c94 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 10 Apr 2024 13:04:40 +0200 Subject: [PATCH 218/264] Do not report document metering on system indices (#107041) For system indices we don't want to emit metrics. DocumentSizeReporter will be created given an index. It will internally contain a SystemIndices instance that will verify the indexName with isSystemName --- docs/changelog/107041.yaml | 6 ++++++ .../internal/DocumentSizeObserverWithPipelinesIT.java | 2 +- .../plugins/internal/DocumentSizeObserverIT.java | 2 +- .../elasticsearch/action/bulk/TransportShardBulkAction.java | 2 +- .../plugins/internal/DocumentParsingProvider.java | 4 ++-- .../java/org/elasticsearch/ingest/IngestServiceTests.java | 2 +- 6 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/107041.yaml diff --git a/docs/changelog/107041.yaml b/docs/changelog/107041.yaml new file mode 100644 index 0000000000000..b8b4f3d7c5690 --- /dev/null +++ b/docs/changelog/107041.yaml @@ -0,0 +1,6 @@ +pr: 107041 +summary: '`DocumentParsingObserver` to accept an `indexName` to allow skipping system + indices' +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java index d1cdc719b02f1..49ab73e8d2375 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java @@ -92,7 +92,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java index fd6151e8eadde..edf6973849bad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java @@ -86,7 
+86,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 265719b4738c0..39de11d39bc34 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -487,7 +487,7 @@ private static void onComplete( final BulkItemResponse executionResult = context.getExecutionResult(); final boolean isFailed = executionResult.isFailed(); if (isFailed == false && opType != DocWriteRequest.OpType.DELETE) { - DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(); + DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(docWriteRequest.index()); DocumentSizeObserver documentSizeObserver = context.getDocumentSizeObserver(); documentSizeReporter.onCompleted(docWriteRequest.index(), documentSizeObserver.normalisedBytesParsed()); } diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index 946cd97968e22..329f3d704e50b 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -19,7 +19,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return DocumentSizeReporter.EMPTY_INSTANCE; } @@ -42,6 +42,6 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar /** * @return an instance of a reporter to use when parsing has been completed and indexing successful */ - DocumentSizeReporter getDocumentParsingReporter(); + DocumentSizeReporter getDocumentParsingReporter(String indexName); } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 084eb94852524..41e865ceb97fb 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -1206,7 +1206,7 @@ public long normalisedBytesParsed() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return null; } From b3bcc81ca6fafd505ac7b9a168017f59c0a3477b Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 10 Apr 2024 08:28:40 -0400 Subject: [PATCH 219/264] [Transform] Release test resources (#107057) Consume the HttpEntity after the API response is parsed, releasing network and thread resources back to their respective pools. Leaving them unconsumed does not appear to be causing issues during tests, but it does log a large amount of hanging threads on test failure, making it harder to spot what may be the issue when a thread is hanging during a transform test. 
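Concretely, the pattern this change applies across the test helpers is to consume the entity in a `finally` block so the pooled connection is released even when an assertion throws first. A minimal sketch of the idiom, where the request is a placeholder and `client()`/`assertOK` come from the test base class as in the diff below:

```java
// EntityUtils is org.apache.http.util.EntityUtils from the Apache HTTP client.
Response response = client().performRequest(request);
try {
    assertOK(response); // may throw before the entity is ever consumed
} finally {
    // Release the connection back to the pool even on assertion failure;
    // consumeQuietly swallows any I/O error raised during cleanup.
    EntityUtils.consumeQuietly(response.getEntity());
}
```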
Close #107055 --- .../test/rest/ESRestTestCase.java | 29 ++++++++-- .../common/TransformCommonRestTestCase.java | 2 +- .../integration/TransformChainIT.java | 2 +- .../integration/TransformRestTestCase.java | 54 +++++++++---------- 4 files changed, 53 insertions(+), 34 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 29b74478bec6b..4c1980fb1f673 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -157,11 +157,16 @@ public abstract class ESRestTestCase extends ESTestCase { /** * Convert the entity from a {@link Response} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static Map entityAsMap(Response response) throws IOException { return entityAsMap(response.getEntity()); } + /** + * Convert the entity from a {@link Response} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. + */ public static Map entityAsMap(HttpEntity entity) throws IOException { XContentType xContentType = XContentType.fromMediaType(entity.getContentType().getValue()); // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation @@ -174,11 +179,14 @@ public static Map entityAsMap(HttpEntity entity) throws IOExcept ) ) { return parser.map(); + } finally { + EntityUtils.consumeQuietly(entity); } } /** * Convert the entity from a {@link Response} into a list of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static List entityAsList(Response response) throws IOException { XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); @@ -192,6 +200,8 @@ public static List entityAsList(Response response) throws IOException { ) ) { return parser.list(); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); } } @@ -1603,6 +1613,14 @@ public static Response assertOK(Response response) { return response; } + public static void assertOKAndConsume(Response response) { + try { + assertOK(response); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } + } + public static ObjectPath assertOKAndCreateObjectPath(Response response) throws IOException { assertOK(response); return ObjectPath.createFromResponse(response); @@ -1622,9 +1640,14 @@ public static void assertDocCount(RestClient client, String indexName, long docC } public static void assertAcknowledged(Response response) throws IOException { - assertOK(response); - String jsonBody = EntityUtils.toString(response.getEntity()); - assertThat(jsonBody, containsString("\"acknowledged\":true")); + try { + assertOK(response); + String jsonBody = EntityUtils.toString(response.getEntity()); + assertThat(jsonBody, containsString("\"acknowledged\":true")); + } finally { + // if assertOK throws an exception, still release resources + EntityUtils.consumeQuietly(response.getEntity()); + } } /** diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java index 486dd7c581032..98cf817d6c018 100644 --- 
a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -122,6 +122,6 @@ protected void logAudits() throws Exception { protected void refreshIndex(String index) throws IOException { Request refreshRequest = new Request("POST", index + "/_refresh"); - assertOK(adminClient().performRequest(refreshRequest)); + assertOKAndConsume(adminClient().performRequest(refreshRequest)); } } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java index 600ceb3cd8202..4d9a9e7705052 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -188,7 +188,7 @@ private void testChainedTransforms(final int numTransforms) throws Exception { assertFalse(aliasExists(destWriteAlias)); String transformConfig = createTransformConfig(sourceIndex, destIndex, destReadAlias, destWriteAlias); - assertAcknowledged(putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT)); + putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT); } List transformIdsShuffled = new ArrayList<>(transformIds); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 4cc9a31c8eff5..4b7e478dbb61d 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -10,6 +10,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -168,15 +169,15 @@ protected void deleteTransform(String id, boolean force) throws IOException { if (force) { request.addParameter(TransformField.FORCE.getPreferredName(), "true"); } - assertOK(adminClient().performRequest(request)); + assertAcknowledged(adminClient().performRequest(request)); createdTransformIds.remove(id); } - protected Response putTransform(String id, String config, RequestOptions options) throws IOException { - return putTransform(id, config, false, options); + protected void putTransform(String id, String config, RequestOptions options) throws IOException { + putTransform(id, config, false, options); } - protected Response putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { + protected void putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { if 
(createdTransformIds.contains(id)) { throw new IllegalArgumentException("transform [" + id + "] is already registered"); } @@ -187,9 +188,8 @@ protected Response putTransform(String id, String config, boolean deferValidatio request.addParameter("defer_validation", "true"); } request.setOptions(options); - Response response = assertOK(client().performRequest(request)); + assertAcknowledged(client().performRequest(request)); createdTransformIds.add(id); - return response; } protected Map previewTransform(String transformConfig, RequestOptions options) throws IOException { @@ -214,8 +214,7 @@ protected Map getBasicTransformStats(String id) throws IOExcepti var request = new Request("GET", TRANSFORM_ENDPOINT + id + "/_stats"); request.addParameter(BASIC_STATS.getPreferredName(), "true"); request.setOptions(RequestOptions.DEFAULT); - Response response = client().performRequest(request); - List> stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(response)); + var stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(client().performRequest(request))); assertThat(stats, hasSize(1)); return stats.get(0); } @@ -226,11 +225,10 @@ protected String getTransformState(String id) throws IOException { @SuppressWarnings("unchecked") protected Map getTransform(String id) throws IOException { - Request request = new Request("GET", TRANSFORM_ENDPOINT + id); - Response response = client().performRequest(request); - List> transformConfigs = (List>) XContentMapValues.extractValue( + var request = new Request("GET", TRANSFORM_ENDPOINT + id); + var transformConfigs = (List>) XContentMapValues.extractValue( "transforms", - entityAsMap(response) + entityAsMap(client().performRequest(request)) ); assertThat(transformConfigs, hasSize(1)); return transformConfigs.get(0); @@ -257,14 +255,6 @@ protected long getCheckpoint(Map stats) { return ((Integer) XContentMapValues.extractValue("checkpointing.last.checkpoint", stats)).longValue(); } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithFixedInterval( - String field, - DateHistogramInterval interval, - ZoneId zone - ) { - return new DateHistogramGroupSource(field, null, false, new DateHistogramGroupSource.FixedInterval(interval), zone, null); - } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithCalendarInterval( String field, DateHistogramInterval interval, @@ -357,7 +347,7 @@ protected TransformConfig.Builder createTransformConfigBuilder( String destinationIndex, QueryConfig queryConfig, String... 
sourceIndices - ) throws Exception { + ) { return TransformConfig.builder() .setId(id) .setSource(new SourceConfig(sourceIndices, queryConfig, Collections.emptyMap())) @@ -377,7 +367,7 @@ protected void updateConfig(String id, String update, boolean deferValidation, R } updateRequest.setJsonEntity(update); updateRequest.setOptions(options); - assertOK(client().performRequest(updateRequest)); + assertOKAndConsume(client().performRequest(updateRequest)); } protected void createReviewsIndex( @@ -447,7 +437,7 @@ protected void createReviewsIndex( Request req = new Request("PUT", indexName); req.setEntity(indexMappings); req.setOptions(RequestOptions.DEFAULT); - assertOK(adminClient().performRequest(req)); + assertAcknowledged(adminClient().performRequest(req)); } // create index @@ -489,9 +479,12 @@ protected void doBulk(String bulkDocuments, boolean refresh) throws IOException bulkRequest.setJsonEntity(bulkDocuments); bulkRequest.setOptions(RequestOptions.DEFAULT); Response bulkResponse = adminClient().performRequest(bulkRequest); - assertOK(bulkResponse); - var bulkMap = entityAsMap(bulkResponse); - assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + try { + var bulkMap = entityAsMap(assertOK(bulkResponse)); + assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + } finally { + EntityUtils.consumeQuietly(bulkResponse.getEntity()); + } } protected Map matchAllSearch(String index, int size, RequestOptions options) throws IOException { @@ -499,8 +492,11 @@ protected Map matchAllSearch(String index, int size, RequestOpti request.addParameter("size", Integer.toString(size)); request.setOptions(options); Response response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + try { + return entityAsMap(assertOK(response)); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } } private void waitForPendingTasks() { @@ -515,7 +511,7 @@ private void waitForPendingTasks() { ); request.addParameters(parameters); try { - adminClient().performRequest(request); + EntityUtils.consumeQuietly(adminClient().performRequest(request).getEntity()); } catch (Exception e) { throw new AssertionError("Failed to wait for pending tasks to complete", e); } From 8638deeb9e5c14f1fb1f0fddbcee76e3ec0c342d Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 10 Apr 2024 08:37:49 -0400 Subject: [PATCH 220/264] Openai model_id is required (#107286) --- docs/reference/inference/put-inference.asciidoc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 6df1993175a0d..7d0ede82f70fa 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -165,7 +165,7 @@ want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `model_id`::: -(Optional, string) +(Required, string) The name of the model to use for the {infer} task. Refer to the https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] for the list of available text embedding models. 
@@ -431,4 +431,3 @@ PUT _inference/completion/openai_completion } ------------------------------------------------------------ // TEST[skip:TBD] - From 943885d0cd180be0436a9191181ebb768d5d4e05 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 10 Apr 2024 15:12:20 +0200 Subject: [PATCH 221/264] [DOCS][ESQL] Render locate function docs (#107305) --- docs/reference/esql/functions/string-functions.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index b568ae1061bb5..273c508fc6f63 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -11,6 +11,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -25,6 +26,7 @@ include::concat.asciidoc[] include::layout/left.asciidoc[] include::length.asciidoc[] +include::layout/locate.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] include::right.asciidoc[] From 19e9fc32f2d449e77e905aba6ca03c6351b3bbdc Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 10 Apr 2024 16:13:21 +0200 Subject: [PATCH 222/264] ES|QL: regex warnings in csv-spec tests (#107273) --- .../xpack/esql/ccq/MultiClustersIT.java | 6 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 17 ++++-- .../esql/qa/rest/RestEnrichTestCase.java | 8 +-- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 57 ++++++++++++------- .../xpack/esql/EsqlTestUtils.java | 14 +++++ .../testFixtures/src/main/resources/README.md | 43 +++++++++++++- .../src/main/resources/ip.csv-spec | 21 +++++++ .../elasticsearch/xpack/esql/CsvTests.java | 4 +- .../elasticsearch/xpack/ql/CsvSpecReader.java | 14 +++++ 9 files changed, 148 insertions(+), 36 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 9a494f6309997..2f681fc23bf31 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -133,14 +133,12 @@ protected boolean supportsAsync() { private Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (supportsAsync()) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - public void testCount() throws Exception { { Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 349954450904d..4d8770a6ff112 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -35,6 +35,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import 
java.util.regex.Pattern; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; @@ -143,7 +144,11 @@ protected void shouldSkipTest(String testName) throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - Map answer = runEsql(builder.query(testCase.query), testCase.expectedWarnings(false)); + Map answer = runEsql( + builder.query(testCase.query), + testCase.expectedWarnings(false), + testCase.expectedWarningsRegex() + ); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); @@ -160,12 +165,16 @@ protected final void doTest() throws Throwable { assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger); } - private Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + private Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { if (mode == Mode.ASYNC) { assert supportsAsync(); - return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index e04435b715c99..a670b11c61780 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -144,7 +144,7 @@ public void wipeTestData() throws IOException { public void testNonExistentEnrichPolicy() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris"), List.of()) + () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris")) ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), @@ -188,14 +188,12 @@ public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { private Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (mode == Mode.ASYNC) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - @Override protected boolean preserveClusterUponCompletion() { return true; diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 5aa48234cb11a..a2296168c5fc0 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ 
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -50,10 +50,10 @@ import java.util.Map; import java.util.Set; import java.util.function.IntFunction; +import java.util.regex.Pattern; import static java.util.Collections.emptySet; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -76,6 +76,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(RestEsqlTestCase.class); private static final List NO_WARNINGS = List.of(); + private static final List NO_WARNINGS_REGEX = List.of(); private static final String MAPPING_ALL_TYPES; @@ -393,7 +394,7 @@ public void testCSVNoHeaderMode() throws IOException { options.addHeader("Content-Type", mediaType); options.addHeader("Accept", "text/csv; header=absent"); request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); String actual = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); assertEquals("keyword0,0\r\n", actual); } @@ -452,7 +453,7 @@ public void testOutOfRangeComparisons() throws IOException { "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. Only first 20 failures recorded.", "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" ); - var result = runEsql(query, expectedWarnings, mode); + var result = runEsql(query, expectedWarnings, NO_WARNINGS_REGEX, mode); var values = as(result.get("values"), ArrayList.class); assertThat( @@ -661,22 +662,35 @@ private static String expectedTextBody(String format, int count, @Nullable Chara } public Map runEsql(RequestObjectBuilder requestObject) throws IOException { - return runEsql(requestObject, NO_WARNINGS, mode); + return runEsql(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX, mode); } public static Map runEsqlSync(RequestObjectBuilder requestObject) throws IOException { - return runEsqlSync(requestObject, NO_WARNINGS); + return runEsqlSync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); } - static Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings, Mode mode) throws IOException { + public static Map runEsqlAsync(RequestObjectBuilder requestObject) throws IOException { + return runEsqlAsync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); + } + + static Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex, + Mode mode + ) throws IOException { if (mode == ASYNC) { - return runEsqlAsync(requestObject, expectedWarnings); + return runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return runEsqlSync(requestObject, expectedWarnings); + return runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } - public static Map runEsqlSync(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlSync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { requestObject.build(); Request request = prepareRequest(SYNC); String mediaType = attachBody(requestObject, request); @@ -692,11 +706,15 
@@ public static Map runEsqlSync(RequestObjectBuilder requestObject } request.setOptions(options); - HttpEntity entity = performRequest(request, expectedWarnings); + HttpEntity entity = performRequest(request, expectedWarnings, expectedWarningsRegex); return entityToMap(entity, requestObject.contentType()); } - public static Map runEsqlAsync(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlAsync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { addAsyncParameters(requestObject); requestObject.build(); Request request = prepareRequest(ASYNC); @@ -730,7 +748,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec // no id returned from an async call, must have completed immediately and without keep_on_completion assertThat(requestObject.keepOnCompletion(), either(nullValue()).or(is(false))); assertThat((boolean) json.get("is_running"), is(false)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); json.remove("is_running"); // remove this to not mess up later map assertions return Collections.unmodifiableMap(json); } else { @@ -739,7 +757,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec if ((boolean) json.get("is_running") == false) { // must have completed immediately so keep_on_completion must be true assertThat(requestObject.keepOnCompletion(), is(true)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); // we already have the results, but let's remember them so that we can compare to async get initialColumns = json.get("columns"); initialValues = json.get("values"); @@ -763,7 +781,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec assertEquals(initialValues, result.get("values")); } - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); assertDeletable(id); return removeAsyncProperties(result); } @@ -837,7 +855,7 @@ static String runEsqlAsTextWithFormat(RequestObjectBuilder builder, String forma } request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); return Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); } @@ -870,8 +888,9 @@ private static String attachBody(RequestObjectBuilder requestObject, Request req return mediaType; } - private static HttpEntity performRequest(Request request, List allowedWarnings) throws IOException { - return assertWarnings(performRequest(request), allowedWarnings); + private static HttpEntity performRequest(Request request, List allowedWarnings, List allowedWarningsRegex) + throws IOException { + return assertWarnings(performRequest(request), allowedWarnings, allowedWarningsRegex); } private static Response performRequest(Request request) throws IOException { @@ -884,13 +903,13 @@ private static Response performRequest(Request request) throws IOException { return response; } - private static HttpEntity assertWarnings(Response response, List allowedWarnings) { + private static HttpEntity assertWarnings(Response response, List allowedWarnings, List allowedWarningsRegex) { List warnings = new ArrayList<>(response.getWarnings()); warnings.removeAll(mutedWarnings()); if (shouldLog()) { LOGGER.info("RESPONSE warnings (after muted)={}", warnings); } - 
assertMap(warnings, matchesList(allowedWarnings)); + EsqlTestUtils.assertWarnings(warnings, allowedWarnings, allowedWarningsRegex); return response.getEntity(); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index fc8f80a19f09f..e6470e0eb2d05 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -43,11 +43,15 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import static java.util.Collections.emptyList; import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.ql.TestUtils.of; import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertTrue; public final class EsqlTestUtils { @@ -244,4 +248,14 @@ public static String randomEnrichCommand(String name, Enrich.Mode mode, String m all.addAll(after); return String.join(" | ", all); } + + public static void assertWarnings(List warnings, List allowedWarnings, List allowedWarningsRegex) { + if (allowedWarningsRegex.isEmpty()) { + assertMap(warnings.stream().sorted().toList(), matchesList(allowedWarnings.stream().sorted().toList())); + } else { + for (String warning : warnings) { + assertTrue("Unexpected warning: " + warning, allowedWarningsRegex.stream().anyMatch(x -> x.matcher(warning).matches())); + } + } + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md index fdd52c6aac229..dad5ae2828174 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md @@ -102,7 +102,7 @@ include::{esql-specs}/floats.csv-spec[tag=sin-result]
What is this asciidoc syntax?
-The first section is a source code block for the ES|QL query: 
+The first section is a source code block for the ES|QL query:
- a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`)
- `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL
@@ -176,3 +176,44 @@ row a = [true, false, false, true]
```
That skips nodes that don't have the `esql.mv_sort` feature.
+
+
+### Warnings
+
+Some queries can return warnings, e.g. for number overflows or when a multi-value is passed to a function
+that does not support it.
+
+Each CSV-SPEC test also has to assert all the expected warnings.
+
+Warnings can be specified as plain text or as a regular expression (but a single test cannot have a mix of both).
+Each warning has to be specified on a single row, between the query and the result, prefixed by `warning:` or `warningRegex:`.
+If multiple warnings are defined, the order is not relevant.
+
+This is an example of how to test a query that returns two warnings:
+
+```csv-spec
+addLongOverflow
+row max = 9223372036854775807 | eval sum = max + 1 | keep sum;
+
+warning:Line 1:44: evaluation of [max + 1] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 1:44: java.lang.ArithmeticException: long overflow
+
+sum:long
+null
+;
+```
+
+The same, using regular expressions:
+
+```csv-spec
+addLongOverflow
+row max = 9223372036854775807 | eval sum = max + 1 | keep sum;
+
+warningRegex:Line \d+:\d+: evaluation of \[max \+ 1\] failed, treating result as null. Only first 20 failures recorded.
+warningRegex:Line \d+:\d+: java.lang.ArithmeticException: long overflow
+
+sum:long
+null
+;
+```
+
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec
index 58c1cf3dc9174..8e0da1dd354ed 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec
@@ -166,6 +166,27 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9
eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1
;
+
+inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0]
+required_feature: esql.mv_warn
+
+from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq;
+ignoreOrder:true
+warningRegex:Line \d+:\d+: evaluation of \[ip0==ip1\] failed, treating result as null. Only first 20 failures recorded.
+warningRegex:Line \d+:\d+: evaluation of \[eq in \(ip0, ip1\)\] failed, treating result as null. Only first 20 failures recorded.
+warningRegex:java.lang.IllegalArgumentException: single-value function encountered multi-value + +card:keyword |host:keyword |ip0:ip |ip1:ip |eq:ip +eth0 |alpha |127.0.0.1 |127.0.0.1 |127.0.0.1 +eth1 |alpha |::1 |::1 |::1 +eth0 |beta |127.0.0.1 |::1 |::1 +eth1 |beta |127.0.0.1 |127.0.0.2 |127.0.0.2 +eth1 |beta |127.0.0.1 |128.0.0.1 |128.0.0.1 +lo0 |gamma |fe80::cae2:65ff:fece:feb9 |fe81::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:feb9 +eth0 |gamma |fe80::cae2:65ff:fece:feb9 |127.0.0.3 |127.0.0.3 +eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 +; + cidrMatchSimple required_feature: esql.mv_warn diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 662ae1a208ed0..573dbd20b39c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -99,8 +99,6 @@ import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.ListMatcher.matchesList; -import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; @@ -470,6 +468,6 @@ private void assertWarnings(List warnings) { normalized.add(normW); } } - assertMap(normalized, matchesList(testCase.expectedWarnings(true))); + EsqlTestUtils.assertWarnings(normalized, testCase.expectedWarnings(true), testCase.expectedWarningsRegex()); } } diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 5023b5a4bf877..757fe411387d6 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; +import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @@ -68,7 +69,15 @@ public Object parse(String line) { // read data String lower = line.toLowerCase(Locale.ROOT); if (lower.startsWith("warning:")) { + if (testCase.expectedWarningsRegex.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } testCase.expectedWarnings.add(line.substring("warning:".length()).trim()); + } else if (lower.startsWith("warningregex:")) { + if (testCase.expectedWarnings.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } + testCase.expectedWarningsRegex.add(Pattern.compile(".*" + line.substring("warningregex:".length()).trim() + ".*")); } else if (lower.startsWith("ignoreorder:")) { testCase.ignoreOrder = Boolean.parseBoolean(line.substring("ignoreOrder:".length()).trim()); } else if (line.startsWith(";")) { @@ -93,6 +102,7 @@ public static class CsvTestCase { public String earlySchema; public String expectedResults; private final List expectedWarnings = new ArrayList<>(); + private final List 
expectedWarningsRegex = new ArrayList<>(); public boolean ignoreOrder; public List requiredFeatures = List.of(); @@ -137,6 +147,10 @@ public List expectedWarnings(boolean forEmulated) { public void adjustExpectedWarnings(Function updater) { expectedWarnings.replaceAll(updater::apply); } + + public List expectedWarningsRegex() { + return expectedWarningsRegex; + } } } From 48a88c575cca0579b3eeca46bcdeb1242b12b3db Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Wed, 10 Apr 2024 09:21:24 -0500 Subject: [PATCH 223/264] Renaming GeoIpDownloaderStatsAction (#107290) Renaming GeoIpDownloaderStatsAction to GeoIpStatsAction --- .../ingest/apis/geoip-stats-api.asciidoc | 8 ++--- .../ingest/geoip/GeoIpDownloaderIT.java | 25 +++++---------- .../ingest/geoip/GeoIpDownloaderStatsIT.java | 8 ++--- .../ingest/geoip/IngestGeoIpPlugin.java | 10 +++--- ...StatsAction.java => GeoIpStatsAction.java} | 32 +++++++++---------- ...on.java => GeoIpStatsTransportAction.java} | 18 +++++------ ...sAction.java => RestGeoIpStatsAction.java} | 8 ++--- ...tsActionNodeResponseSerializingTests.java} | 15 ++++----- ...=> GeoIpStatsActionNodeResponseTests.java} | 4 +-- ...pStatsActionResponseSerializingTests.java} | 17 +++++----- 10 files changed, 65 insertions(+), 80 deletions(-) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsAction.java => GeoIpStatsAction.java} (88%) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsTransportAction.java => GeoIpStatsTransportAction.java} (80%) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{RestGeoIpDownloaderStatsAction.java => RestGeoIpStatsAction.java} (80%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionNodeResponseSerializingTests.java => GeoIpStatsActionNodeResponseSerializingTests.java} (68%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionNodeResponseTests.java => GeoIpStatsActionNodeResponseTests.java} (91%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionResponseSerializingTests.java => GeoIpStatsActionResponseSerializingTests.java} (50%) diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 6ef0db546342b..84a2b00737e5a 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -4,8 +4,8 @@ GeoIP stats ++++ -Gets download statistics for GeoIP2 databases used with the -<>. +Gets statistics about the <>, including +download statistics for GeoIP2 databases used with it. [source,console] ---- @@ -60,7 +60,7 @@ Total number of database updates skipped. `nodes`:: (object) -Downloaded GeoIP2 databases for each node. +Statistics for each node. + .Properties of `nodes` [%collapsible%open] @@ -90,4 +90,4 @@ Downloaded database files, including related license files. {es} stores these files in the node's <>: `$ES_TMPDIR/geoip-databases/`. 
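Every call site migrated in the integration-test diffs below follows the same pattern. A hedged sketch of the renamed API from inside a test class, assuming the ingest-geoip module is available in the test cluster; `client()` and the assertions come from the `ESIntegTestCase` base class, and the accessor names come from this patch:

```java
import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats;
import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction;
import org.elasticsearch.test.ESIntegTestCase;

public class GeoIpStatsUsageIT extends ESIntegTestCase {
    public void testFetchStats() {
        // Execute the renamed action against the cluster.
        GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet();

        // Cluster-wide downloader stats: the first non-null per-node value, or EMPTY.
        GeoIpDownloaderStats stats = response.getDownloaderStats();
        assertNotNull(stats);

        // Per-node listings of downloaded databases, temp files, and config-directory databases.
        for (GeoIpStatsAction.NodeResponse node : response.getNodes()) {
            assertNotNull(node.getDatabases());
            assertNotNull(node.getFilesInTemp());
            assertNotNull(node.getConfigDatabases());
        }
    }
}
```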
===== -==== \ No newline at end of file +==== diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 54d465aecda52..9dcd8abc7bc57 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.IngestPlugin; @@ -121,13 +121,10 @@ public void cleanUp() throws Exception { } }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); - assertThat(response.getStats().getDatabasesCount(), equalTo(0)); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); + assertThat(response.getDownloaderStats().getDatabasesCount(), equalTo(0)); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); assertThat(nodeResponse.getDatabases(), empty()); assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).toList(), empty()); @@ -703,12 +700,9 @@ private void setupDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat( nodeResponse.getConfigDatabases(), containsInAnyOrder("GeoLite2-Country.mmdb", "GeoLite2-City.mmdb", "GeoLite2-ASN.mmdb") @@ -751,12 +745,9 @@ private void deleteDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); } }); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java index 77b0faeeb6ebd..ec54317e144d1 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; @@ -65,8 +65,8 @@ public void testStats() throws Exception { * slowly to pass. */ assumeTrue("only test with fixture to have stable results", getEndpoint() != null); - GeoIpDownloaderStatsAction.Request req = new GeoIpDownloaderStatsAction.Request(); - GeoIpDownloaderStatsAction.Response response = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Request req = new GeoIpStatsAction.Request(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView jsonMapView = new XContentTestUtils.JsonMapView(convertToMap(response)); assertThat(jsonMapView.get("stats.successful_downloads"), equalTo(0)); assertThat(jsonMapView.get("stats.failed_downloads"), equalTo(0)); @@ -78,7 +78,7 @@ public void testStats() throws Exception { updateClusterSettings(Settings.builder().put(GeoIpDownloaderTaskExecutor.ENABLED_SETTING.getKey(), true)); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response res = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Response res = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView view = new XContentTestUtils.JsonMapView(convertToMap(res)); assertThat(view.get("stats.successful_downloads"), equalTo(4)); assertThat(view.get("stats.failed_downloads"), equalTo(0)); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 2e0a84cfde23b..e5756652a9842 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -28,9 +28,9 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsTransportAction; -import org.elasticsearch.ingest.geoip.stats.RestGeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsTransportAction; +import org.elasticsearch.ingest.geoip.stats.RestGeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksExecutor; @@ -144,7 +144,7 @@ public List> getPersistentTasksExecutor( @Override public List> getActions() { - return 
List.of(new ActionHandler<>(GeoIpDownloaderStatsAction.INSTANCE, GeoIpDownloaderStatsTransportAction.class)); + return List.of(new ActionHandler<>(GeoIpStatsAction.INSTANCE, GeoIpStatsTransportAction.class)); } @Override @@ -159,7 +159,7 @@ public List getRestHandlers( Supplier nodesInCluster, Predicate clusterSupportsFeature ) { - return List.of(new RestGeoIpDownloaderStatsAction()); + return List.of(new RestGeoIpStatsAction()); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java similarity index 88% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java index f9b1d8c637f68..db1242888ca82 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java @@ -30,11 +30,11 @@ import java.util.Objects; import java.util.Set; -public class GeoIpDownloaderStatsAction { +public class GeoIpStatsAction { public static final ActionType INSTANCE = new ActionType<>("cluster:monitor/ingest/geoip/stats"); - private GeoIpDownloaderStatsAction() {/* no instances */} + private GeoIpStatsAction() {/* no instances */} public static class Request extends BaseNodesRequest implements ToXContentObject { @@ -89,8 +89,8 @@ public Response(ClusterName clusterName, List nodes, List n.stats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); + public GeoIpDownloaderStats getDownloaderStats() { + return getNodes().stream().map(n -> n.downloaderStats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); } @Override @@ -105,7 +105,7 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws I @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - GeoIpDownloaderStats stats = getStats(); + GeoIpDownloaderStats stats = getDownloaderStats(); builder.startObject(); builder.field("stats", stats); builder.startObject("nodes"); @@ -153,14 +153,14 @@ public int hashCode() { public static class NodeResponse extends BaseNodeResponse { - private final GeoIpDownloaderStats stats; + private final GeoIpDownloaderStats downloaderStats; private final Set databases; private final Set filesInTemp; private final Set configDatabases; protected NodeResponse(StreamInput in) throws IOException { super(in); - stats = in.readBoolean() ? new GeoIpDownloaderStats(in) : null; + downloaderStats = in.readBoolean() ? 
new GeoIpDownloaderStats(in) : null; databases = in.readCollectionAsImmutableSet(StreamInput::readString); filesInTemp = in.readCollectionAsImmutableSet(StreamInput::readString); configDatabases = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) @@ -170,20 +170,20 @@ protected NodeResponse(StreamInput in) throws IOException { protected NodeResponse( DiscoveryNode node, - GeoIpDownloaderStats stats, + GeoIpDownloaderStats downloaderStats, Set databases, Set filesInTemp, Set configDatabases ) { super(node); - this.stats = stats; + this.downloaderStats = downloaderStats; this.databases = Set.copyOf(databases); this.filesInTemp = Set.copyOf(filesInTemp); this.configDatabases = Set.copyOf(configDatabases); } - public GeoIpDownloaderStats getStats() { - return stats; + public GeoIpDownloaderStats getDownloaderStats() { + return downloaderStats; } public Set getDatabases() { @@ -201,9 +201,9 @@ public Set getConfigDatabases() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(stats != null); - if (stats != null) { - stats.writeTo(out); + out.writeBoolean(downloaderStats != null); + if (downloaderStats != null) { + downloaderStats.writeTo(out); } out.writeStringCollection(databases); out.writeStringCollection(filesInTemp); @@ -217,7 +217,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NodeResponse that = (NodeResponse) o; - return stats.equals(that.stats) + return downloaderStats.equals(that.downloaderStats) && databases.equals(that.databases) && filesInTemp.equals(that.filesInTemp) && Objects.equals(configDatabases, that.configDatabases); @@ -225,7 +225,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(stats, databases, filesInTemp, configDatabases); + return Objects.hash(downloaderStats, databases, filesInTemp, configDatabases); } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java index 0958002405fbe..13f9544e1b9e4 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java @@ -18,10 +18,10 @@ import org.elasticsearch.ingest.geoip.DatabaseNodeService; import org.elasticsearch.ingest.geoip.GeoIpDownloader; import org.elasticsearch.ingest.geoip.GeoIpDownloaderTaskExecutor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeRequest; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeResponse; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Request; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Response; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeRequest; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeResponse; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Request; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Response; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -29,14 +29,14 @@ import java.io.IOException; import java.util.List; -public class GeoIpDownloaderStatsTransportAction extends TransportNodesAction { +public class GeoIpStatsTransportAction extends TransportNodesAction { private final TransportService transportService; private final DatabaseNodeService registry; private final GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor; @Inject - public GeoIpDownloaderStatsTransportAction( + public GeoIpStatsTransportAction( TransportService transportService, ClusterService clusterService, ThreadPool threadPool, @@ -45,7 +45,7 @@ public GeoIpDownloaderStatsTransportAction( GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor ) { super( - GeoIpDownloaderStatsAction.INSTANCE.name(), + GeoIpStatsAction.INSTANCE.name(), clusterService, transportService, actionFilters, @@ -75,10 +75,10 @@ protected NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throw @Override protected NodeResponse nodeOperation(NodeRequest request, Task task) { GeoIpDownloader geoIpTask = geoIpDownloaderTaskExecutor.getCurrentTask(); - GeoIpDownloaderStats stats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); + GeoIpDownloaderStats downloaderStats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); return new NodeResponse( transportService.getLocalNode(), - stats, + downloaderStats, registry.getAvailableDatabases(), registry.getFilesInTemp(), registry.getConfigDatabases() diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java index 49f3ee81c7f62..ac6022205d04e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java @@ -20,7 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; @ServerlessScope(Scope.INTERNAL) -public class RestGeoIpDownloaderStatsAction extends BaseRestHandler { +public class RestGeoIpStatsAction extends BaseRestHandler { @Override public String getName() { @@ -34,10 +34,6 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - return channel -> client.execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request(), - new RestToXContentListener<>(channel) - ); + return channel -> client.execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request(), new RestToXContentListener<>(channel)); } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java similarity index 68% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java rename to 
modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java index 54193967ba853..1008dcf56c4f1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java @@ -15,30 +15,29 @@ import java.util.Set; -public class GeoIpDownloaderStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.NodeResponse> { +public class GeoIpStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.NodeResponse::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.NodeResponse::new; } @Override - protected GeoIpDownloaderStatsAction.NodeResponse createTestInstance() { + protected GeoIpStatsAction.NodeResponse createTestInstance() { return createRandomInstance(); } @Override - protected GeoIpDownloaderStatsAction.NodeResponse mutateInstance(GeoIpDownloaderStatsAction.NodeResponse instance) { + protected GeoIpStatsAction.NodeResponse mutateInstance(GeoIpStatsAction.NodeResponse instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } - static GeoIpDownloaderStatsAction.NodeResponse createRandomInstance() { + static GeoIpStatsAction.NodeResponse createRandomInstance() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - return new GeoIpDownloaderStatsAction.NodeResponse( + return new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java similarity index 91% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java index a0fd470ef0468..27a332c3b42f9 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java @@ -18,14 +18,14 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class GeoIpDownloaderStatsActionNodeResponseTests extends ESTestCase { +public class GeoIpStatsActionNodeResponseTests extends ESTestCase { public void testInputsAreDefensivelyCopied() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - GeoIpDownloaderStatsAction.NodeResponse nodeResponse = new 
GeoIpDownloaderStatsAction.NodeResponse( + GeoIpStatsAction.NodeResponse nodeResponse = new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java similarity index 50% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java index d566fa8838df1..6e057843b9776 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java @@ -14,25 +14,24 @@ import java.util.List; -public class GeoIpDownloaderStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.Response> { +public class GeoIpStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.Response::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.Response::new; } @Override - protected GeoIpDownloaderStatsAction.Response createTestInstance() { - List nodeResponses = randomList( + protected GeoIpStatsAction.Response createTestInstance() { + List nodeResponses = randomList( 10, - GeoIpDownloaderStatsActionNodeResponseSerializingTests::createRandomInstance + GeoIpStatsActionNodeResponseSerializingTests::createRandomInstance ); - return new GeoIpDownloaderStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); + return new GeoIpStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); } @Override - protected GeoIpDownloaderStatsAction.Response mutateInstance(GeoIpDownloaderStatsAction.Response instance) { + protected GeoIpStatsAction.Response mutateInstance(GeoIpStatsAction.Response instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } } From a9cab350e0263a6a5f59ea4dd7974c3b8eb6b51d Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Wed, 10 Apr 2024 10:40:13 -0400 Subject: [PATCH 224/264] Log skipped elections due to shutdown marker (#106701) --- .../cluster/SpecificMasterNodesIT.java | 4 --- .../cluster/coordination/Coordinator.java | 36 +++++++++++++------ .../coordination/ElectionStrategy.java | 20 +++++++++-- .../AbstractCoordinatorTestCase.java | 22 +++++++----- 4 files changed, 57 insertions(+), 25 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 43506647f89ba..cd0bf5c428118 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -103,10 +103,6 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) ); - assertThat( 
- internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), - equalTo(masterNodeName) - ); assertThat( internalCluster().masterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index fc7eaa97c677b..156ba88a7d2b1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; +import org.elasticsearch.cluster.coordination.ElectionStrategy.NodeEligibility; import org.elasticsearch.cluster.coordination.FollowersChecker.FollowerCheckRequest; import org.elasticsearch.cluster.coordination.JoinHelper.InitialJoinAccumulator; import org.elasticsearch.cluster.metadata.Metadata; @@ -544,8 +545,14 @@ private void startElection() { // The preVoteCollector is only active while we are candidate, but it does not call this method with synchronisation, so we have // to check our mode again here. if (mode == Mode.CANDIDATE) { - if (localNodeMayWinElection(getLastAcceptedState(), electionStrategy) == false) { - logger.trace("skip election as local node may not win it: {}", getLastAcceptedState().coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip election as local node may not win it ({}): {}", + nodeEligibility.reason(), + getLastAcceptedState().coordinationMetadata() + ); return; } @@ -598,7 +605,7 @@ private void abdicateTo(DiscoveryNode newMaster) { becomeCandidate("after abdicating to " + newMaster); } - private static boolean localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { + private static NodeEligibility localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { final DiscoveryNode localNode = lastAcceptedState.nodes().getLocalNode(); assert localNode != null; return electionStrategy.nodeMayWinElection(lastAcceptedState, localNode); @@ -1283,8 +1290,12 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura metadataBuilder.coordinationMetadata(coordinationMetadata); coordinationState.get().setInitialState(ClusterState.builder(currentState).metadata(metadataBuilder).build()); - assert localNodeMayWinElection(getLastAcceptedState(), electionStrategy) - : "initial state does not allow local node to win election: " + getLastAcceptedState().coordinationMetadata(); + var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + assert nodeEligibility.mayWin() + : "initial state does not allow local node to win election, reason: " + + nodeEligibility.reason() + + " , metadata: " + + getLastAcceptedState().coordinationMetadata(); preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version startElectionScheduler(); return true; @@ -1767,9 +1778,14 @@ public void run() { 
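The prevoting hunk below applies the same treatment as `startElection` above. To show what the new extension point enables, here is a hedged sketch of a custom `ElectionStrategy` that vetoes a candidate with a descriptive reason; the `NodeEligibility` record comes from the `ElectionStrategy` diff further down, while the shutdown-metadata lookup is an assumption for illustration and is not part of this commit:

```java
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration;
import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection;
import org.elasticsearch.cluster.coordination.ElectionStrategy;
import org.elasticsearch.cluster.node.DiscoveryNode;

public class ShutdownAwareElectionStrategy extends ElectionStrategy {

    @Override
    public NodeEligibility nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) {
        if (isMarkedForShutdown(lastAcceptedState, node)) {
            // The reason string surfaces in the "skip election" / "skip prevoting" trace logs.
            return new NodeEligibility(false, "node [" + node.getId() + "] is marked for shutdown");
        }
        return super.nodeMayWinElection(lastAcceptedState, node);
    }

    @Override
    protected boolean satisfiesAdditionalQuorumConstraints(
        DiscoveryNode localNode,
        long localCurrentTerm,
        long localAcceptedTerm,
        long localAcceptedVersion,
        VotingConfiguration lastCommittedConfiguration,
        VotingConfiguration lastAcceptedConfiguration,
        VoteCollection joinVotes
    ) {
        return true; // no extra quorum constraints in this sketch
    }

    private static boolean isMarkedForShutdown(ClusterState state, DiscoveryNode node) {
        // Assumption: consult the cluster's node-shutdown metadata; the exact
        // accessor is not shown anywhere in this commit.
        return state.metadata().nodeShutdowns().contains(node.getId());
    }
}
```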
synchronized (mutex) { if (mode == Mode.CANDIDATE) { final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); - - if (localNodeMayWinElection(lastAcceptedState, electionStrategy) == false) { - logger.trace("skip prevoting as local node may not win election: {}", lastAcceptedState.coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip prevoting as local node may not win election ({}): {}", + nodeEligibility.reason(), + lastAcceptedState.coordinationMetadata() + ); return; } @@ -1983,10 +1999,10 @@ public void onResponse(Void ignored) { // if necessary, abdicate to another node or improve the voting configuration boolean attemptReconfiguration = true; final ClusterState state = getLastAcceptedState(); // committed state - if (localNodeMayWinElection(state, electionStrategy) == false) { + if (localNodeMayWinElection(state, electionStrategy).mayWin() == false) { final List masterCandidates = completedNodes().stream() .filter(DiscoveryNode::isMasterNode) - .filter(node -> electionStrategy.nodeMayWinElection(state, node)) + .filter(node -> electionStrategy.nodeMayWinElection(state, node).mayWin()) .filter(node -> { // check if master candidate would be able to get an election quorum if we were to // abdicate to it. Assume that every node that completed the publication can provide diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java index c98d9ec39e0f0..2bf6e10a9855a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java @@ -34,6 +34,17 @@ protected boolean satisfiesAdditionalQuorumConstraints( } }; + /** + * Contains a result for whether a node may win an election and the reason if not. 
+ */ + public record NodeEligibility(boolean mayWin, String reason) {} + + public static final NodeEligibility NODE_MAY_WIN_ELECTION = new NodeEligibility(true, ""); + public static final NodeEligibility NODE_MAY_NOT_WIN_ELECTION = new NodeEligibility( + false, + "node is ineligible for election, not a voting node in the voting configuration" + ); + /** * Whether there is an election quorum from the point of view of the given local node under the provided voting configurations */ @@ -105,10 +116,13 @@ public void beforeCommit(long term, long version, ActionListener listener) listener.onResponse(null); } - public boolean nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { + public NodeEligibility nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { final String nodeId = node.getId(); - return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) + if (lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) || lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(nodeId) - || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId)); + || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId))) { + return NODE_MAY_WIN_ELECTION; + } + return NODE_MAY_NOT_WIN_ELECTION; } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 4e43cb33111a1..cb70ab8e491cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -288,7 +288,7 @@ public class Cluster implements Releasable { @Nullable // null means construct a list from all the current nodes private List seedHostsList; - Cluster(int initialNodeCount) { + public Cluster(int initialNodeCount) { this(initialNodeCount, true, Settings.EMPTY); } @@ -364,7 +364,13 @@ List addNodes(int newNodesCount) { return addedNodes; } - int size() { + public static void becomeCandidate(ClusterNode node, String reason) { + synchronized (node.coordinator.mutex) { + node.coordinator.becomeCandidate(reason); + } + } + + public int size() { return clusterNodes.size(); } @@ -760,7 +766,7 @@ private void stabilise(long stabilisationDurationMillis, boolean expectIdleJoinV } } - void bootstrapIfNecessary() { + public void bootstrapIfNecessary() { if (clusterNodes.stream().allMatch(ClusterNode::isNotUsefullyBootstrapped)) { assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty()); assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty()); @@ -773,7 +779,7 @@ void bootstrapIfNecessary() { } } - void runFor(long runDurationMillis, String description) { + public void runFor(long runDurationMillis, String description) { final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + runDurationMillis; logger.info("--> runFor({}ms) running until [{}ms]: {}", runDurationMillis, endTime, description); @@ -856,7 +862,7 @@ ClusterNode getAnyNode() { return getAnyNodeExcept(); } - ClusterNode getAnyNodeExcept(ClusterNode... clusterNodesToExclude) { + public ClusterNode getAnyNodeExcept(ClusterNode... 
clusterNodesToExclude) { List filteredNodes = getAllNodesExcept(clusterNodesToExclude); assert filteredNodes.isEmpty() == false; return randomFrom(filteredNodes); @@ -956,7 +962,7 @@ public final class ClusterNode { private static final Logger logger = LogManager.getLogger(ClusterNode.class); private final int nodeIndex; - Coordinator coordinator; + public Coordinator coordinator; private final DiscoveryNode localNode; final CoordinationState.PersistedState persistedState; final Settings nodeSettings; @@ -1388,7 +1394,7 @@ public void onFailure(Exception e) { }); } - AckCollector submitUpdateTask( + public AckCollector submitUpdateTask( String source, UnaryOperator clusterStateUpdate, CoordinatorTestClusterStateUpdateTask taskListener @@ -1460,7 +1466,7 @@ void onDisconnectEventFrom(ClusterNode clusterNode) { transportService.disconnectFromNode(clusterNode.localNode); } - ClusterState getLastAppliedClusterState() { + public ClusterState getLastAppliedClusterState() { return clusterApplierService.state(); } From c57dd98ef483e3e87fb01d3ca6542f86a30298ad Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:44:46 +0200 Subject: [PATCH 225/264] semantic_text: Add index metadata information for inference field mappers (#107147) Co-authored-by: @jimczi Co-authored-by: @Mikep86 --- .../cluster/ClusterStateDiffIT.java | 6 +- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/metadata/IndexMetadata.java | 70 ++++++++++ .../metadata/InferenceFieldMetadata.java | 132 ++++++++++++++++++ .../metadata/MetadataCreateIndexService.java | 9 +- .../metadata/MetadataMappingService.java | 7 +- .../index/mapper/InferenceFieldMapper.java | 27 ++++ .../index/mapper/MappingLookup.java | 31 +++- .../cluster/metadata/IndexMetadataTests.java | 33 ++++- .../metadata/InferenceFieldMetadataTests.java | 72 ++++++++++ .../metadata/DataStreamTestHelper.java | 1 + 11 files changed, 373 insertions(+), 16 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 409fbdd70333e..e0dbc74567053 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -61,6 +61,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.metadata.AliasMetadata.newAliasMetadataBuilder; +import static org.elasticsearch.cluster.metadata.IndexMetadataTests.randomInferenceFields; import static org.elasticsearch.cluster.routing.RandomShardRoutingMutator.randomChange; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo; @@ -571,7 +572,7 @@ public IndexMetadata randomCreate(String name) { @Override public IndexMetadata randomChange(IndexMetadata part) { IndexMetadata.Builder builder = IndexMetadata.builder(part); - switch (randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 3)) { case 0: 
builder.settings(Settings.builder().put(part.getSettings()).put(randomSettings(Settings.EMPTY))); break; @@ -585,6 +586,9 @@ public IndexMetadata randomChange(IndexMetadata part) { case 2: builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)); break; + case 3: + builder.putInferenceFields(randomInferenceFields()); + break; default: throw new IllegalArgumentException("Shouldn't be here"); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 8589e183a150e..9d21e9fe5d794 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -166,6 +166,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0); + public static final TransportVersion INFERENCE_FIELDS_METADATA = def(8_628_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 22672756bdaf0..529814e83ba38 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -540,6 +540,8 @@ public Iterator> settings() { public static final String KEY_SHARD_SIZE_FORECAST = "shard_size_forecast"; + public static final String KEY_INFERENCE_FIELDS = "field_inference"; + public static final String INDEX_STATE_FILE_PREFIX = "state-"; static final TransportVersion SYSTEM_INDEX_FLAG_ADDED = TransportVersions.V_7_10_0; @@ -574,6 +576,8 @@ public Iterator> settings() { @Nullable private final MappingMetadata mapping; + private final ImmutableOpenMap inferenceFields; + private final ImmutableOpenMap customData; private final Map> inSyncAllocationIds; @@ -642,6 +646,7 @@ private IndexMetadata( final int numberOfReplicas, final Settings settings, final MappingMetadata mapping, + final ImmutableOpenMap inferenceFields, final ImmutableOpenMap aliases, final ImmutableOpenMap customData, final Map> inSyncAllocationIds, @@ -692,6 +697,7 @@ private IndexMetadata( this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); this.settings = settings; this.mapping = mapping; + this.inferenceFields = inferenceFields; this.customData = customData; this.aliases = aliases; this.inSyncAllocationIds = inSyncAllocationIds; @@ -748,6 +754,7 @@ IndexMetadata withMappingMetadata(MappingMetadata mapping) { this.numberOfReplicas, this.settings, mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -806,6 +813,7 @@ public IndexMetadata withInSyncAllocationIds(int shardId, Set inSyncSet) this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, Maps.copyMapWithAddedOrReplacedEntry(this.inSyncAllocationIds, shardId, Set.copyOf(inSyncSet)), @@ -862,6 +870,7 @@ public IndexMetadata withIncrementedPrimaryTerm(int shardId) { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -918,6 +927,7 @@ public IndexMetadata withTimestampRange(IndexLongFieldRange timestampRange) { 
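The `IndexMetadata` hunks below thread the new `inferenceFields` map through every copy constructor and, further down, through the wire format. The wire-format side hinges on one rule: writer and reader must gate the new section on the same transport version, so a reader never consumes bytes an older writer did not produce. Condensed into a hedged sketch (names from this patch; the surrounding stream and builder variables are assumed):

```java
// Write side: only emit the new section to peers that understand it.
if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) {
    out.writeCollection(inferenceFields.values());
}

// Read side: mirror the exact same condition; when talking to an older peer
// the builder's inference-field map simply stays empty.
if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) {
    in.readCollectionAsImmutableList(InferenceFieldMetadata::new).forEach(builder::putInferenceField);
}
```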
this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -970,6 +980,7 @@ public IndexMetadata withIncrementedVersion() { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -1193,6 +1204,10 @@ public MappingMetadata mapping() { return mapping; } + public Map getInferenceFields() { + return inferenceFields; + } + @Nullable public IndexMetadataStats getStats() { return stats; @@ -1403,6 +1418,9 @@ public boolean equals(Object o) { if (rolloverInfos.equals(that.rolloverInfos) == false) { return false; } + if (inferenceFields.equals(that.inferenceFields) == false) { + return false; + } if (isSystem != that.isSystem) { return false; } @@ -1423,6 +1441,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); result = 31 * result + rolloverInfos.hashCode(); + result = 31 * result + inferenceFields.hashCode(); result = 31 * result + Boolean.hashCode(isSystem); return result; } @@ -1469,6 +1488,7 @@ private static class IndexMetadataDiff implements Diff { @Nullable private final Diff settingsDiff; private final Diff> mappings; + private final Diff> inferenceFields; private final Diff> aliases; private final Diff> customData; private final Diff>> inSyncAllocationIds; @@ -1500,6 +1520,7 @@ private static class IndexMetadataDiff implements Diff { : ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, after.mapping).build(), DiffableUtils.getStringKeySerializer() ); + inferenceFields = DiffableUtils.diff(before.inferenceFields, after.inferenceFields, DiffableUtils.getStringKeySerializer()); aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); customData = DiffableUtils.diff(before.customData, after.customData, DiffableUtils.getStringKeySerializer()); inSyncAllocationIds = DiffableUtils.diff( @@ -1524,6 +1545,8 @@ private static class IndexMetadataDiff implements Diff { new DiffableUtils.DiffableValueReader<>(DiffableStringMap::readFrom, DiffableStringMap::readDiffFrom); private static final DiffableUtils.DiffableValueReader ROLLOVER_INFO_DIFF_VALUE_READER = new DiffableUtils.DiffableValueReader<>(RolloverInfo::new, RolloverInfo::readDiffFrom); + private static final DiffableUtils.DiffableValueReader INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER = + new DiffableUtils.DiffableValueReader<>(InferenceFieldMetadata::new, InferenceFieldMetadata::readDiffFrom); IndexMetadataDiff(StreamInput in) throws IOException { index = in.readString(); @@ -1546,6 +1569,15 @@ private static class IndexMetadataDiff implements Diff { } primaryTerms = in.readVLongArray(); mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MAPPING_DIFF_VALUE_READER); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields = DiffableUtils.readImmutableOpenMapDiff( + in, + DiffableUtils.getStringKeySerializer(), + INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER + ); + } else { + inferenceFields = DiffableUtils.emptyDiff(); + } aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), ALIAS_METADATA_DIFF_VALUE_READER); customData = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_DIFF_VALUE_READER); inSyncAllocationIds = DiffableUtils.readJdkMapDiff( @@ -1595,6 +1627,9 @@ public void 
writeTo(StreamOutput out) throws IOException { } out.writeVLongArray(primaryTerms); mappings.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields.writeTo(out); + } aliases.writeTo(out); customData.writeTo(out); inSyncAllocationIds.writeTo(out); @@ -1628,6 +1663,7 @@ public IndexMetadata apply(IndexMetadata part) { builder.mapping = mappings.apply( ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, part.mapping).build() ).get(MapperService.SINGLE_MAPPING_NAME); + builder.inferenceFields.putAllFromMap(inferenceFields.apply(part.inferenceFields)); builder.aliases.putAllFromMap(aliases.apply(part.aliases)); builder.customMetadata.putAllFromMap(customData.apply(part.customData)); builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds)); @@ -1673,6 +1709,10 @@ public static IndexMetadata readFrom(StreamInput in, @Nullable Function builder.putInferenceField(f)); + } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { AliasMetadata aliasMd = new AliasMetadata(in); @@ -1733,6 +1773,9 @@ public void writeTo(StreamOutput out, boolean mappingsAsHash) throws IOException mapping.writeTo(out); } } + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + out.writeCollection(inferenceFields.values()); + } out.writeCollection(aliases.values()); out.writeMap(customData, StreamOutput::writeWriteable); out.writeMap( @@ -1788,6 +1831,7 @@ public static class Builder { private long[] primaryTerms = null; private Settings settings = Settings.EMPTY; private MappingMetadata mapping; + private final ImmutableOpenMap.Builder inferenceFields; private final ImmutableOpenMap.Builder aliases; private final ImmutableOpenMap.Builder customMetadata; private final Map> inSyncAllocationIds; @@ -1802,6 +1846,7 @@ public static class Builder { public Builder(String index) { this.index = index; + this.inferenceFields = ImmutableOpenMap.builder(); this.aliases = ImmutableOpenMap.builder(); this.customMetadata = ImmutableOpenMap.builder(); this.inSyncAllocationIds = new HashMap<>(); @@ -1819,6 +1864,7 @@ public Builder(IndexMetadata indexMetadata) { this.settings = indexMetadata.getSettings(); this.primaryTerms = indexMetadata.primaryTerms.clone(); this.mapping = indexMetadata.mapping; + this.inferenceFields = ImmutableOpenMap.builder(indexMetadata.inferenceFields); this.aliases = ImmutableOpenMap.builder(indexMetadata.aliases); this.customMetadata = ImmutableOpenMap.builder(indexMetadata.customData); this.routingNumShards = indexMetadata.routingNumShards; @@ -2059,6 +2105,16 @@ public Builder shardSizeInBytesForecast(Long shardSizeInBytesForecast) { return this; } + public Builder putInferenceField(InferenceFieldMetadata value) { + this.inferenceFields.put(value.getName(), value); + return this; + } + + public Builder putInferenceFields(Map values) { + this.inferenceFields.putAllFromMap(values); + return this; + } + public IndexMetadata build() { return build(false); } @@ -2221,6 +2277,7 @@ IndexMetadata build(boolean repair) { numberOfReplicas, settings, mapping, + inferenceFields.build(), aliasesMap, newCustomMetadata, Map.ofEntries(denseInSyncAllocationIds), @@ -2379,6 +2436,14 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build builder.field(KEY_SHARD_SIZE_FORECAST, indexMetadata.shardSizeInBytesForecast); } + if (indexMetadata.getInferenceFields().isEmpty() == false) { + builder.startObject(KEY_INFERENCE_FIELDS); + for 
(InferenceFieldMetadata field : indexMetadata.getInferenceFields().values()) { + field.toXContent(builder, params); + } + builder.endObject(); + } + builder.endObject(); } @@ -2456,6 +2521,11 @@ public static IndexMetadata fromXContent(XContentParser parser, Map, ToXContentFragment { + private static final String INFERENCE_ID_FIELD = "inference_id"; + private static final String SOURCE_FIELDS_FIELD = "source_fields"; + + private final String name; + private final String inferenceId; + private final String[] sourceFields; + + public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields) { + this.name = Objects.requireNonNull(name); + this.inferenceId = Objects.requireNonNull(inferenceId); + this.sourceFields = Objects.requireNonNull(sourceFields); + } + + public InferenceFieldMetadata(StreamInput input) throws IOException { + this.name = input.readString(); + this.inferenceId = input.readString(); + this.sourceFields = input.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(inferenceId); + out.writeStringArray(sourceFields); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceFieldMetadata that = (InferenceFieldMetadata) o; + return Objects.equals(name, that.name) + && Objects.equals(inferenceId, that.inferenceId) + && Arrays.equals(sourceFields, that.sourceFields); + } + + @Override + public int hashCode() { + int result = Objects.hash(name, inferenceId); + result = 31 * result + Arrays.hashCode(sourceFields); + return result; + } + + public String getName() { + return name; + } + + public String getInferenceId() { + return inferenceId; + } + + public String[] getSourceFields() { + return sourceFields; + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return SimpleDiffable.readDiffFrom(InferenceFieldMetadata::new, in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(name); + builder.field(INFERENCE_ID_FIELD, inferenceId); + builder.array(SOURCE_FIELDS_FIELD, sourceFields); + return builder.endObject(); + } + + public static InferenceFieldMetadata fromXContent(XContentParser parser) throws IOException { + final String name = parser.currentName(); + + XContentParser.Token token = parser.nextToken(); + Objects.requireNonNull(token, "Expected InferenceFieldMetadata but got EOF"); + + String currentFieldName = null; + String inferenceId = null; + List inputFields = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.VALUE_STRING) { + if (INFERENCE_ID_FIELD.equals(currentFieldName)) { + inferenceId = parser.text(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SOURCE_FIELDS_FIELD.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + inputFields.add(parser.text()); + } else { + parser.skipChildren(); + } + } + } + } else { + parser.skipChildren(); + } + } + return new InferenceFieldMetadata(name, inferenceId, inputFields.toArray(String[]::new)); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index da24f0b9d0dc5..52642e1de8ac9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1263,10 +1263,11 @@ static IndexMetadata buildIndexMetadata( indexMetadataBuilder.system(isSystem); // now, update the mappings with the actual source Map mappingsMetadata = new HashMap<>(); - DocumentMapper mapper = documentMapperSupplier.get(); - if (mapper != null) { - MappingMetadata mappingMd = new MappingMetadata(mapper); - mappingsMetadata.put(mapper.type(), mappingMd); + DocumentMapper docMapper = documentMapperSupplier.get(); + if (docMapper != null) { + MappingMetadata mappingMd = new MappingMetadata(docMapper); + mappingsMetadata.put(docMapper.type(), mappingMd); + indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields()); } for (MappingMetadata mappingMd : mappingsMetadata.values()) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 3ca206eaddb28..4e714b96f64c7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -193,9 +193,10 @@ private static ClusterState applyRequest( IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexMetadata); // Mapping updates on a single type may have side-effects on other types so we need to // update mapping metadata on all types - DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - indexMetadataBuilder.putMapping(new MappingMetadata(mapper)); + DocumentMapper docMapper = mapperService.documentMapper(); + if (docMapper != null) { + indexMetadataBuilder.putMapping(new MappingMetadata(docMapper)); + indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields()); } if (updatedMapping) { indexMetadataBuilder.mappingVersion(1 + indexMetadataBuilder.mappingVersion()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java new file mode 100644 index 0000000000000..2b0833c72021b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.inference.InferenceService; + +import java.util.Set; + +/** + * Field mapper that requires to transform its input before indexation through the {@link InferenceService}. + */ +public interface InferenceFieldMapper { + + /** + * Retrieve the inference metadata associated with this mapper. 
+ * + * @param sourcePaths The source path that populates the input for the field (before inference) + */ + InferenceFieldMetadata getMetadata(Set sourcePaths); +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 673593cc6e240..bf879f30e5a29 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -10,9 +10,11 @@ import org.apache.lucene.codecs.PostingsFormat; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.inference.InferenceService; import java.util.ArrayList; import java.util.Collection; @@ -47,6 +49,7 @@ private CacheKey() {} /** Full field name to mapper */ private final Map fieldMappers; private final Map objectMappers; + private final Map inferenceFields; private final int runtimeFieldMappersCount; private final NestedLookup nestedLookup; private final FieldTypeLookup fieldTypeLookup; @@ -84,12 +87,12 @@ private static void collect( Collection fieldMappers, Collection fieldAliasMappers ) { - if (mapper instanceof ObjectMapper) { - objectMappers.add((ObjectMapper) mapper); - } else if (mapper instanceof FieldMapper) { - fieldMappers.add((FieldMapper) mapper); - } else if (mapper instanceof FieldAliasMapper) { - fieldAliasMappers.add((FieldAliasMapper) mapper); + if (mapper instanceof ObjectMapper objectMapper) { + objectMappers.add(objectMapper); + } else if (mapper instanceof FieldMapper fieldMapper) { + fieldMappers.add(fieldMapper); + } else if (mapper instanceof FieldAliasMapper fieldAliasMapper) { + fieldAliasMappers.add(fieldAliasMapper); } else { throw new IllegalStateException("Unrecognized mapper type [" + mapper.getClass().getSimpleName() + "]."); } @@ -174,6 +177,15 @@ private MappingLookup( final Collection runtimeFields = mapping.getRoot().runtimeFields(); this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFields); + + Map inferenceFields = new HashMap<>(); + for (FieldMapper mapper : mappers) { + if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) { + inferenceFields.put(mapper.name(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.name()))); + } + } + this.inferenceFields = Map.copyOf(inferenceFields); + if (runtimeFields.isEmpty()) { // without runtime fields this is the same as the field type lookup this.indexTimeLookup = fieldTypeLookup; @@ -360,6 +372,13 @@ public Map objectMappers() { return objectMappers; } + /** + * Returns a map containing all fields that require to run inference (through the {@link InferenceService} prior to indexation. 
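To make the contract concrete, an editor's sketch follows, not part of the patch; a real implementation would also extend FieldMapper so that the MappingLookup loop above discovers it via the instanceof check, and the class and parameter names here are invented:

import java.util.Set;

import org.elasticsearch.cluster.metadata.InferenceFieldMetadata;
import org.elasticsearch.index.mapper.InferenceFieldMapper;

// Hedged sketch of an InferenceFieldMapper implementer. MappingLookup supplies
// sourcePaths from fieldTypeLookup.sourcePaths(mapper.name()), as shown above.
class SketchSemanticFieldMapper implements InferenceFieldMapper {
    private final String fullPath;      // the mapper's full field name
    private final String inferenceId;   // endpoint id, e.g. from a mapper parameter

    SketchSemanticFieldMapper(String fullPath, String inferenceId) {
        this.fullPath = fullPath;
        this.inferenceId = inferenceId;
    }

    @Override
    public InferenceFieldMetadata getMetadata(Set<String> sourcePaths) {
        return new InferenceFieldMetadata(fullPath, inferenceId, sourcePaths.toArray(String[]::new));
    }
}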
+ */ + public Map inferenceFields() { + return inferenceFields; + } + public NestedLookup nestedLookup() { return nestedLookup; } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index 5cc1a7206e7e4..116acf938fcbc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -83,6 +83,8 @@ public void testIndexMetadataSerialization() throws IOException { IndexMetadataStats indexStats = randomBoolean() ? randomIndexStats(numShard) : null; Double indexWriteLoadForecast = randomBoolean() ? randomDoubleBetween(0.0, 128, true) : null; Long shardSizeInBytesForecast = randomBoolean() ? randomLongBetween(1024, 10240) : null; + Map inferenceFields = randomInferenceFields(); + IndexMetadata metadata = IndexMetadata.builder("foo") .settings(indexSettings(numShard, numberOfReplicas).put("index.version.created", 1)) .creationDate(randomLong()) @@ -107,6 +109,7 @@ public void testIndexMetadataSerialization() throws IOException { .stats(indexStats) .indexWriteLoadForecast(indexWriteLoadForecast) .shardSizeInBytesForecast(shardSizeInBytesForecast) + .putInferenceFields(inferenceFields) .build(); assertEquals(system, metadata.isSystem()); @@ -141,6 +144,7 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getStats(), fromXContentMeta.getStats()); assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), fromXContentMeta.getInferenceFields()); final BytesStreamOutput out = new BytesStreamOutput(); metadata.writeTo(out); @@ -162,8 +166,9 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getCustomData(), deserialized.getCustomData()); assertEquals(metadata.isSystem(), deserialized.isSystem()); assertEquals(metadata.getStats(), deserialized.getStats()); - assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); - assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getForecastedWriteLoad(), deserialized.getForecastedWriteLoad()); + assertEquals(metadata.getForecastedShardSizeInBytes(), deserialized.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), deserialized.getInferenceFields()); } } @@ -547,10 +552,34 @@ public void testPartialIndexReceivesDataFrozenTierPreference() { } } + public void testInferenceFieldMetadata() { + Settings.Builder settings = indexSettings(IndexVersion.current(), randomIntBetween(1, 8), 0); + IndexMetadata idxMeta1 = IndexMetadata.builder("test").settings(settings).build(); + assertTrue(idxMeta1.getInferenceFields().isEmpty()); + + Map dynamicFields = randomInferenceFields(); + IndexMetadata idxMeta2 = IndexMetadata.builder(idxMeta1).putInferenceFields(dynamicFields).build(); + assertThat(idxMeta2.getInferenceFields(), equalTo(dynamicFields)); + } + private static Settings indexSettingsWithDataTier(String dataTier) { return indexSettings(IndexVersion.current(), 1, 0).put(DataTier.TIER_PREFERENCE, dataTier).build(); } + public static Map randomInferenceFields() { + Map map = new HashMap<>(); + int numFields = randomIntBetween(0, 5); + for (int i 
= 0; i < numFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + map.put(field, randomInferenceFieldMetadata(field)); + } + return map; + } + + private static InferenceFieldMetadata randomInferenceFieldMetadata(String name) { + return new InferenceFieldMetadata(name, randomIdentifier(), randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)); + } + private IndexMetadataStats randomIndexStats(int numberOfShards) { IndexWriteLoad.Builder indexWriteLoadBuilder = IndexWriteLoad.builder(numberOfShards); int numberOfPopulatedWriteLoads = randomIntBetween(0, numberOfShards); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java new file mode 100644 index 0000000000000..bd4c87be51157 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.equalTo; + +public class InferenceFieldMetadataTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + final InferenceFieldMetadata before = createTestItem(); + final BytesStreamOutput out = new BytesStreamOutput(); + before.writeTo(out); + + final StreamInput in = out.bytes().streamInput(); + final InferenceFieldMetadata after = new InferenceFieldMetadata(in); + + assertThat(after, equalTo(before)); + } + + @Override + protected InferenceFieldMetadata createTestInstance() { + return createTestItem(); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return p -> p.equals(""); // do not add elements at the top-level as any element at this level is parsed as a new inference field + } + + @Override + protected InferenceFieldMetadata doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken()); + InferenceFieldMetadata inferenceMetadata = InferenceFieldMetadata.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return inferenceMetadata; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + private static InferenceFieldMetadata createTestItem() { + String name = randomAlphaOfLengthBetween(3, 10); + String inferenceId = randomIdentifier(); + String[] inputFields = generateRandomStringArray(5, 10, false, false); + return new InferenceFieldMetadata(name, inferenceId, inputFields); + } + + public void testNullCtorArgsThrowException() { + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", new String[0])); + assertThrows(NullPointerException.class, () -> new 
InferenceFieldMetadata("name", null, new String[0])); + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null)); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index c83caa617e16e..e2b03c6b81af3 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -729,6 +729,7 @@ public static IndicesService mockIndicesServices(MappingLookup mappingLookup) th Mapping mapping = new Mapping(root, new MetadataFieldMapper[0], null); DocumentMapper documentMapper = mock(DocumentMapper.class); when(documentMapper.mapping()).thenReturn(mapping); + when(documentMapper.mappers()).thenReturn(MappingLookup.EMPTY); when(documentMapper.mappingSource()).thenReturn(mapping.toCompressedXContent()); RoutingFieldMapper routingFieldMapper = mock(RoutingFieldMapper.class); when(routingFieldMapper.required()).thenReturn(false); From 9e502aa4a082fb7f7bc1bfdb693efdf525fc5959 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 10 Apr 2024 10:49:10 -0400 Subject: [PATCH 226/264] Expanding and refactoring the vector rolling upgrade tests (#107020) This commit removes the legacy yaml rolling upgrade tests for vectors to the new rolling upgrade package. Also, it adds rolling upgrade tests for `int8_hnsw`. --- qa/rolling-upgrade-legacy/build.gradle | 22 +- .../test/mixed_cluster/30_vector_search.yml | 144 ------- .../test/old_cluster/30_vector_search.yml | 236 ----------- .../upgraded_cluster/30_vector_search.yml | 148 ------- .../upgrades/VectorSearchIT.java | 389 ++++++++++++++++++ 5 files changed, 390 insertions(+), 549 deletions(-) delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java diff --git a/qa/rolling-upgrade-legacy/build.gradle b/qa/rolling-upgrade-legacy/build.gradle index e48d05f98b20a..77dfc9724ce8f 100644 --- a/qa/rolling-upgrade-legacy/build.gradle +++ b/qa/rolling-upgrade-legacy/build.gradle @@ -7,8 +7,8 @@ */ + import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -59,11 +59,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.upgrade_from_version', oldVersion nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["old_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["old_cluster/30_vector_search/Create indexed byte vectors and search"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -81,11 +76,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> 
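// Editor's note, not part of the patch: the version-specific excludeList
// entries removed in these hunks are superseded by assumeTrue(...) guards in
// the new VectorSearchIT below, which skip each test when
// getOldClusterTestVersion() predates the feature (8.4.0 for float vectors,
// 8.6.0 for byte vectors, 8.12.1 for int8 quantization).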
nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,11 +93,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -124,11 +109,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml deleted file mode 100644 index 108f58b29bf27..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { 
hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } - - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } - - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml deleted file mode 100644 index 96b950e5ae927..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ /dev/null @@ -1,236 +0,0 @@ ---- -"Create indexed float vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-float-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - dims: 3 - knn: - type: dense_vector - dims: 3 - index: true - similarity: l2_norm - index_options: - type: hnsw - m: 16 - ef_construction: 100 - - do: - bulk: - index: test-float-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-float-index - max_num_segments: 1 - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: 
{ hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - ---- -"Create indexed byte vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-byte-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - element_type: byte - dims: 3 - knn: - type: dense_vector - element_type: byte - dims: 3 - index: true - similarity: l2_norm - - do: - bulk: - index: test-byte-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-byte-index - max_num_segments: 1 - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: 
{ hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml deleted file mode 100644 index ee2c357594b94..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml +++ /dev/null @@ -1,148 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } 
} - - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java new file mode 100644 index 0000000000000..d77910f443d58 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -0,0 +1,389 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; + +public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { + public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + private static final String FLOAT_INDEX_NAME = "float_vector_index"; + private static final String SCRIPT_VECTOR_INDEX_NAME = "script_vector_index"; + private static final String SCRIPT_BYTE_INDEX_NAME = "script_byte_vector_index"; + private static final String BYTE_INDEX_NAME = "byte_vector_index"; + private static final String QUANTIZED_INDEX_NAME = "quantized_vector_index"; + private static final String FLOAT_VECTOR_SEARCH_VERSION = "8.4.0"; + private static final String BYTE_VECTOR_SEARCH_VERSION = "8.6.0"; + private static final String QUANTIZED_VECTOR_SEARCH_VERSION = "8.12.1"; + + public void testScriptByteVectorSearch() throws Exception { + assumeTrue("byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": false + } + } + } + """; + createIndex(SCRIPT_BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_BYTE_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = 
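// Editor's note, not part of the patch: indexVectors (defined at the end of
// this class) writes eight documents, the last an empty {} with no "vector"
// field, so the exists-gated script_score query is expected to match exactly
// seven hits here and in the sibling tests.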
search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testScriptVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": false + } + } + } + """; + createIndex(SCRIPT_VECTOR_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_VECTOR_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testFloatVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(FLOAT_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(FLOAT_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + FLOAT_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), 
closeTo(0.028571429, 0.0001)); + } + + public void testByteVectorSearch() throws Exception { + assumeTrue("Byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(BYTE_INDEX_NAME); + // refresh the index + // force merge the index + client().performRequest(new Request("POST", "/" + BYTE_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.028571429, 0.0001)); + } + + public void testQuantizedVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(QUANTIZED_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "int8_hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(QUANTIZED_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(QUANTIZED_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + 
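// Editor's note, not part of the patch: the closeTo expectations can be
// recomputed from the fixed vectors in indexVectors. Doc "0" is [1, 1, 1], so
// cosineSimilarity([4, 5, 6], [1, 1, 1]) + 1.0 = 15 / (sqrt(3) * sqrt(77)) + 1.0,
// which is about 1.9869276, the script_score check. For the l2_norm knn
// queries, _score = 1 / (1 + squared distance): doc "2" is [1, 1, 3], giving
// 1 / (1 + 9 + 16 + 9) = 1 / 35, about 0.028571429. For this cosine-similarity
// index, knn _score = (1 + cosine) / 2, about 0.99346, matching the expected
// 0.9934857 up to int8 quantization error.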
QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.0001)); + } + + private void indexVectors(String indexName) throws Exception { + String[] vectors = new String[] { + "{\"vector\":[1, 1, 1]}", + "{\"vector\":[1, 1, 2]}", + "{\"vector\":[1, 1, 3]}", + "{\"vector\":[1, 2, 1]}", + "{\"vector\":[1, 3, 1]}", + "{\"vector\":[2, 1, 1]}", + "{\"vector\":[3, 1, 1]}", + "{}" }; + for (int i = 0; i < vectors.length; i++) { + Request indexRequest = new Request("PUT", "/" + indexName + "/_doc/" + i); + indexRequest.setJsonEntity(vectors[i]); + assertOK(client().performRequest(indexRequest)); + } + } + + private static Map search(Request request) throws IOException { + final Response response = client().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + @SuppressWarnings("unchecked") + private static T extractValue(Map map, String path) { + return (T) XContentMapValues.extractValue(path, map); + } +} From 92f7e078071fba021f9784c98a7cea6e76c5710d Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 10 Apr 2024 17:07:20 +0200 Subject: [PATCH 227/264] Fix docs plugin unit tests on windows (#107310) --- .../internal/doc/DocSnippetTaskSpec.groovy | 113 +----------------- .../RestTestsFromDocSnippetTaskSpec.groovy | 30 ++--- 2 files changed, 18 insertions(+), 125 deletions(-) diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy index 89939645d0f9c..85ce3c1804474 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -14,6 +14,8 @@ import spock.lang.TempDir import org.gradle.api.InvalidUserDataException import org.gradle.testfixtures.ProjectBuilder +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + class DocSnippetTaskSpec extends Specification { @TempDir @@ -540,9 +542,9 @@ GET /_analyze ) def snippets = task().parseDocFile(tempDir, doc, []) expect: - snippets*.start == [3] - snippets*.language == ["console"] - snippets*.contents == ["""GET /_analyze + snippets[0].start == 3 + snippets[0].language == "console" + normalizeString(snippets[0].contents, tempDir) == """GET /_analyze { "tokenizer": "keyword", "char_filter": [ @@ -556,112 +558,9 @@ GET /_analyze } ], "text": "My license plate is ٢٥٠١٥" -} -"""] +}""" } - def "test parsing snippet from doc2"() { - given: - def doc = docFile( - """ -[role="xpack"] -[[ml-update-snapshot]] -= Update model snapshots API -++++ -Update model snapshots -++++ - -Updates certain properties of a snapshot. - -[[ml-update-snapshot-request]] -== {api-request-title} - -`POST _ml/anomaly_detectors//model_snapshots//_update` - -[[ml-update-snapshot-prereqs]] -== {api-prereq-title} - -Requires the `manage_ml` cluster privilege. This privilege is included in the -`machine_learning_admin` built-in role. 
- -[[ml-update-snapshot-path-parms]] -== {api-path-parms-title} - -``:: -(Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] - -``:: -(Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] - -[[ml-update-snapshot-request-body]] -== {api-request-body-title} - -The following properties can be updated after the model snapshot is created: - -`description`:: -(Optional, string) A description of the model snapshot. - -`retain`:: -(Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] - - -[[ml-update-snapshot-example]] -== {api-examples-title} - -[source,console] --------------------------------------------------- -POST -_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update -{ - "description": "Snapshot 1", - "retain": true -} --------------------------------------------------- -// TEST[skip:todo] - -When the snapshot is updated, you receive the following results: -[source,js] ----- -{ - "acknowledged": true, - "model": { - "job_id": "it_ops_new_logs", - "timestamp": 1491852978000, - "description": "Snapshot 1", -... - "retain": true - } -} ----- -""" - ) - def snippets = task().parseDocFile(tempDir, doc, []) - expect: - snippets*.start == [50, 62] - snippets*.language == ["console", "js"] - snippets*.contents == ["""POST -_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update -{ - "description": "Snapshot 1", - "retain": true -} -""", """{ - "acknowledged": true, - "model": { - "job_id": "it_ops_new_logs", - "timestamp": 1491852978000, - "description": "Snapshot 1", -... - "retain": true - } -} -"""] - } - - File docFile(String docContent) { def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() file.text = docContent diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy index 6ef4726e1578a..6e86cba235886 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -16,6 +16,7 @@ import org.gradle.testfixtures.ProjectBuilder import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString class RestTestsFromDocSnippetTaskSpec extends Specification { @@ -59,9 +60,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { def build = ProjectBuilder.builder().build() given: def task = build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) -// def task = build.tasks.create("restTestFromSnippet", RestTestsFromSnippetsTask) task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] -// docs() task.docs = build.fileTree(new File(tempDir, "docs")) task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); @@ -72,7 +71,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { then: restSpec.exists() - restSpec.text == """--- + normalizeString(restSpec.text, tempDir) == """--- "line_22": - skip: features: @@ -143,11 +142,10 @@ class 
RestTestsFromDocSnippetTaskSpec extends Specification { } }, "status": 400 - } -""" + }""" def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") restSpec2.exists() - restSpec2.text == """--- + normalizeString(restSpec2.text, tempDir) == """--- "line_50": - skip: features: @@ -167,11 +165,10 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { "description": "Snapshot 1", "retain": true } - - is_false: _shards.failures -""" + - is_false: _shards.failures""" def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") restSpec3.exists() - restSpec3.text == """--- + normalizeString(restSpec3.text, tempDir) == """--- "line_10": - skip: features: @@ -205,15 +202,13 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { - is_false: _shards.failures - match: \$body: - / \\s+author \\s+\\| \\s+name \\s+\\| \\s+page_count \\s+\\| \\s+release_date\\s* - ---------------\\+---------------\\+---------------\\+------------------------\\s* - Dan \\s+Simmons \\s+\\|Hyperion \\s+\\|482 \\s+\\|1989-05-26T00:00:00.000Z\\s* - Frank \\s+Herbert \\s+\\|Dune \\s+\\|604 \\s+\\|1965-06-01T00:00:00.000Z\\s*/ -""" - + / /s+author /s+/| /s+name /s+/| /s+page_count /s+/| /s+release_date/s* + ---------------/+---------------/+---------------/+------------------------/s* + Dan /s+Simmons /s+/|Hyperion /s+/|482 /s+/|1989-05-26T00:00:00.000Z/s* + Frank /s+Herbert /s+/|Dune /s+/|604 /s+/|1965-06-01T00:00:00.000Z/s*/""" def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") restSpec4.exists() - restSpec4.text == """--- + normalizeString(restSpec4.text, tempDir) == """--- "line_51": - skip: features: @@ -356,8 +351,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { "full_name": "Monday Jaffe", "metadata": { "innovation" : 8} } - - is_false: _shards.failures -""" + - is_false: _shards.failures""" } File docFile(String fileName, String docContent) { From a06da4398957876683ab343fd9669dfe9b695512 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:41:21 +0200 Subject: [PATCH 228/264] Update error message in sparse_vector for indices 8.0-8.10 (#107308) --- .../index/mapper/vectors/SparseVectorFieldMapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index 6532abed19044..7b1e20a6cdda3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -44,7 +44,7 @@ public class SparseVectorFieldMapper extends FieldMapper { static final String ERROR_MESSAGE_7X = "[sparse_vector] field type in old 7.x indices is allowed to " + "contain [sparse_vector] fields, but they cannot be indexed or searched."; - static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported from 8.0 to 8.10 versions."; + static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10."; static final IndexVersion PREVIOUS_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.V_8_0_0; static final IndexVersion NEW_SPARSE_VECTOR_INDEX_VERSION = 
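// Editor's note, not part of the patch: these two markers bound the window
// named in ERROR_MESSAGE_8X. Indices created on or after V_8_0_0 and before
// NEW_SPARSE_VECTOR (the version that reintroduced the field type, presumably
// 8.11) reject [sparse_vector] mappings outright, while 7.x indices merely
// leave the field unindexed and unsearchable, per ERROR_MESSAGE_7X.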
IndexVersions.NEW_SPARSE_VECTOR; From f8e516eb9c5f5b5098593cdd1fce5241d4390773 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:41:42 +0200 Subject: [PATCH 229/264] Update sparse_vector docs on index version availability (#107315) --- docs/reference/mapping/types/sparse-vector.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index 17a193eef1d4d..6c7ad6550753e 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -26,6 +26,8 @@ PUT my-index See <> for a complete example on adding documents to a `sparse_vector` mapped field using ELSER. +NOTE: `sparse_vector` fields can not be included in indices that were *created* on {es} versions between 8.0 and 8.10 + NOTE: `sparse_vector` fields only support single-valued fields and strictly positive values. Multi-valued fields and negative values will be rejected. From cb464b6d6a14e83bbfe0eba3d9341030d5d8c179 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Wed, 10 Apr 2024 11:54:46 -0400 Subject: [PATCH 230/264] Bump versions after 7.17.20 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 612838592712b..18a93c9b63a3e 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 58dcf875ce297..c306e1d9f63cb 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -305,8 +305,8 @@ steps: env: BWC_VERSION: 7.16.3 - - label: "{{matrix.image}} / 7.17.20 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.20 + - label: "{{matrix.image}} / 7.17.21 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.21 timeout_in_minutes: 300 matrix: setup: @@ -319,7 +319,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3462e0fb95aba..3410436eda2bf 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -172,8 +172,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.16.3 - - label: 7.17.20 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.20#bwcTest + - label: 7.17.21 / bwc + command: .ci/scripts/run-gradle.sh 
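# Editor's note, not part of the patch: a routine post-release bump. With
# 7.17.20 released, it moves out of the snapshot BWC list into the released
# TransportVersions.csv and IndexVersions.csv tables, and every pipeline
# matrix and Version constant advances to 7.17.21.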
-Dbwc.checkout.align=true v7.17.21#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -181,7 +181,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: 8.0.1 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.1#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index d3e57196e1c89..46165da472e74 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -16,7 +16,7 @@ BWC_VERSION: - "7.14.2" - "7.15.2" - "7.16.3" - - "7.17.20" + - "7.17.21" - "8.0.1" - "8.1.3" - "8.2.3" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index db131b89ffa4e..dfd238a041b1e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - - "7.17.20" + - "7.17.21" - "8.13.3" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 99e811c021845..88a1049a42557 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -120,6 +120,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_18 = new Version(7_17_18_99); public static final Version V_7_17_19 = new Version(7_17_19_99); public static final Version V_7_17_20 = new Version(7_17_20_99); + public static final Version V_7_17_21 = new Version(7_17_21_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 4bae460e3bce2..dbc170828fabc 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -67,6 +67,7 @@ 7.17.17,7171799 7.17.18,7171899 7.17.19,7171999 +7.17.20,7172099 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 61cc2167a9048..f379ac81b9009 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -67,6 +67,7 @@ 7.17.17,7171799 7.17.18,7171899 7.17.19,7171999 +7.17.20,7172099 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 From d915b964ba131073e3c961589ced73f0163699d8 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 10 Apr 2024 17:56:45 +0200 Subject: [PATCH 231/264] Rename ST_CENTROID to ST_CENTROID_AGG (#107226) * Rename ST_CENTROID to ST_CENTROID_AGG In order to allow development of a scalar ST_CENTROID function. 
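(Editor's note, not part of the patch: after this rename the aggregation is invoked as in the sketch below, which mirrors the spatial csv-spec cases updated further down; `airports` and `location` are the example index and field those specs use.)

FROM airports
| STATS count = COUNT(*), centroid = ST_CENTROID_AGG(location)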
* Fix table alignment --- .../functions/aggregation-functions.asciidoc | 2 +- ...roid.asciidoc => st_centroid_agg.asciidoc} | 6 +- .../resources/enrich-IT_tests_only.csv-spec | 4 +- .../src/main/resources/meta.csv-spec | 8 +- .../src/main/resources/spatial.csv-spec | 186 +++++++++--------- .../function/EsqlFunctionRegistry.java | 2 +- .../xpack/esql/plugin/EsqlFeatures.java | 4 +- .../optimizer/LogicalPlanOptimizerTests.java | 2 +- .../optimizer/PhysicalPlanOptimizerTests.java | 38 ++-- 9 files changed, 126 insertions(+), 126 deletions(-) rename docs/reference/esql/functions/{st_centroid.asciidoc => st_centroid_agg.asciidoc} (69%) diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index c040e7fe01327..2fdc8582d6bfb 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -29,6 +29,6 @@ include::median.asciidoc[] include::median-absolute-deviation.asciidoc[] include::min.asciidoc[] include::percentile.asciidoc[] -include::st_centroid.asciidoc[] +include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid_agg.asciidoc similarity index 69% rename from docs/reference/esql/functions/st_centroid.asciidoc rename to docs/reference/esql/functions/st_centroid_agg.asciidoc index e91a325a5597b..c980560f8f198 100644 --- a/docs/reference/esql/functions/st_centroid.asciidoc +++ b/docs/reference/esql/functions/st_centroid_agg.asciidoc @@ -1,6 +1,6 @@ [discrete] [[esql-agg-st-centroid]] -=== `ST_CENTROID` +=== `ST_CENTROID_AGG` experimental::[] @@ -8,11 +8,11 @@ Calculate the spatial centroid over a field with spatial point geometry type. [source.merge.styled,esql] ---- -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports-result] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports-result] |=== Supported types: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index ee43efa69447b..468329e41fe38 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -294,7 +294,7 @@ required_feature: esql.mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) ; warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -325,7 +325,7 @@ FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary | MV_EXPAND city_boundary | EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID(location) BY airport_in_city +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city | SORT count ASC ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index d0e18426f03ab..1a154bc6a61fa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -64,7 +64,7 @@ double pi() "double sinh(angle:double|integer|long|unsigned_long)" "keyword split(string:keyword|text, delim:keyword|text)" "double sqrt(number:double|integer|long|unsigned_long)" -"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"geo_point|cartesian_point st_centroid_agg(field:geo_point|cartesian_point)" "boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" @@ -174,7 +174,7 @@ sin |angle |"double|integer|long|unsigne sinh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. split |[string, delim] |["keyword|text", "keyword|text"] |[, ] sqrt |number |"double|integer|long|unsigned_long" |[""] -st_centroid |field |"geo_point|cartesian_point" |[""] +st_centroid_ag|field |"geo_point|cartesian_point" |[""] st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] @@ -285,7 +285,7 @@ sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric funct sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. -st_centroid |The centroid of a spatial field. +st_centroid_ag|The centroid of a spatial field. st_contains |Returns whether the first geometry contains the second geometry. st_disjoint |Returns whether the two geometries or geometry columns are disjoint. st_intersects |Returns whether the two geometries or geometry columns intersect. 
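Editor's note: as the hunks above show, this rename has to land in three layers at once: the string key under which the function is registered in `EsqlFunctionRegistry` (the `def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid_agg")` hunk later in this patch), the `NodeFeature` id in `EsqlFeatures` (`esql.st_centroid` becomes `esql.st_centroid_agg`), and the `required_feature` headers that gate each csv-spec test in mixed-cluster runs. Below is a minimal sketch of the string-keyed registry pattern, assuming a simplified `Supplier`-based registry rather than the real `def(...)` machinery:

[source,java]
----
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

// Simplified stand-in for a name-keyed function registry: a rename is a
// one-line change to the key under which the constructor is registered.
class FunctionRegistrySketch {
    private final Map<String, Supplier<Object>> functions = new HashMap<>();

    void def(String name, Supplier<Object> ctor) {
        functions.put(name, ctor);
    }

    Object resolve(String name) {
        Supplier<Object> ctor = functions.get(name);
        if (ctor == null) {
            throw new IllegalArgumentException("unknown function [" + name + "]");
        }
        return ctor.get();
    }

    public static void main(String[] args) {
        FunctionRegistrySketch registry = new FunctionRegistrySketch();
        registry.def("st_centroid_agg", Object::new); // key renamed from "st_centroid"
        System.out.println(registry.resolve("st_centroid_agg").getClass());
        try {
            registry.resolve("st_centroid");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // old name no longer resolves
        }
    }
}
----

Because lookup is purely by string key, retiring the old key frees the name `st_centroid` for the scalar function this change is preparing for, which is the motivation stated in the commit message.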
@@ -397,7 +397,7 @@ sin |double sinh |double |false |false |false split |keyword |[false, false] |false |false sqrt |double |false |false |false -st_centroid |"geo_point|cartesian_point" |false |false |true +st_centroid_ag|"geo_point|cartesian_point" |false |false |true st_contains |boolean |[false, false] |false |false st_disjoint |boolean |[false, false] |false |false st_intersects |boolean |[false, false] |false |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 843b2674967fe..26fcca423d28d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -38,46 +38,46 @@ wkt:keyword ; centroidFromStringNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg row wkt = "POINT(42.97109629958868 14.7552534006536)" -| STATS c = ST_CENTROID(TO_GEOPOINT(wkt)); +| STATS c = ST_CENTROID_AGG(TO_GEOPOINT(wkt)); c:geo_point POINT(42.97109629958868 14.7552534006536) ; centroidFromString1 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(42.97109629958868 14.7552534006536) ; centroidFromString2 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(59.390193899162114 18.741501288022846) ; centroidFromString3 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(39.58327988510707 20.619513023697994) @@ -89,7 +89,7 @@ required_feature: esql.st_x_y ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt) +| STATS c = ST_CENTROID_AGG(pt) | EVAL x = ST_X(c), y = ST_Y(c); c:geo_point | x:double | y:double @@ -149,25 +149,25 @@ c:long | x:double | y:double # Tests for ST_CENTROID on GEO_POINT type centroidFromAirports -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg -// tag::st_centroid-airports[] +// tag::st_centroid_agg-airports[] FROM airports -| STATS centroid=ST_CENTROID(location) -// end::st_centroid-airports[] +| STATS centroid=ST_CENTROID_AGG(location) +// end::st_centroid_agg-airports[] ; -// tag::st_centroid-airports-result[] +// tag::st_centroid_agg-airports-result[] centroid:geo_point POINT(-0.030548143003023033 24.37553649504829) -// end::st_centroid-airports-result[] +// end::st_centroid_agg-airports-result[] ; centroidFromAirportsNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(TO_GEOPOINT(location)) +| STATS centroid=ST_CENTROID_AGG(TO_GEOPOINT(location)) ; centroid:geo_point @@ -175,10 +175,10 @@ POINT (-0.03054810272375508 
24.37553651570554) ; centroidFromAirportsCount -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -186,10 +186,10 @@ POINT(-0.030548143003023033 24.37553649504829) | 891 ; centroidFromAirportsCountGrouped -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank | SORT scalerank DESC ; @@ -205,11 +205,11 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -217,11 +217,11 @@ POINT(83.27726172452623 28.99289782286029) | 33 ; centroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank -| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) ; centroid:geo_point | count:long @@ -229,10 +229,10 @@ POINT (7.572387259169772 26.836561792945492) | 891 ; centroidFromAirportsCountCityLocations -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() ; centroid:geo_point | count:long @@ -240,10 +240,10 @@ POINT (1.3965610809060276 24.127649406297987) | 891 ; centroidFromAirportsCountGroupedCountry -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country | SORT count DESC, country ASC | WHERE count >= 10 ; @@ -269,11 +269,11 @@ POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; centroidFromAirportsFilteredCountry -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE country == "United States" -| STATS centroid=ST_CENTROID(city_location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() ; centroid:geo_point | count:long @@ -281,11 +281,11 @@ POINT (-97.3333946136801 38.07953176370194) | 129 ; centroidFromAirportsCountGroupedCountryCentroid -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country -| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country +| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) ; centroid:geo_point | count:long @@ -293,10 +293,10 @@ POINT (17.55538044598613 18.185558743854063) | 891 ; centroidFromAirportsCountryCount -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() +| STATS airports=ST_CENTROID_AGG(location), 
cities=ST_CENTROID_AGG(city_location), count=COUNT() ; airports:geo_point | cities:geo_point | count:long @@ -304,13 +304,13 @@ POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.12 ; centroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -318,11 +318,11 @@ POINT(78.73736493755132 26.761841227998957) | 12 ; centroidFromAirportsAfterMvExpand -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -330,11 +330,11 @@ POINT(2.121611400672094 24.559172889205755) | 933 ; centroidFromAirportsGroupedAfterMvExpand -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank | SORT scalerank DESC ; @@ -350,12 +350,12 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsGroupedAfterMvExpandFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank ; centroid:geo_point | count:long | scalerank:i @@ -363,12 +363,12 @@ POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; centroidFromAirportsAfterMvExpandFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -376,11 +376,11 @@ POINT(83.16847535921261 28.79002037679311) | 40 ; centroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE country == "United Kingdom" -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -392,7 +392,7 @@ required_feature: esql.st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -404,7 +404,7 @@ required_feature: esql.st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | 
count:long @@ -416,7 +416,7 @@ required_feature: esql.st_contains_within FROM airports | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -428,7 +428,7 @@ required_feature: esql.st_intersects FROM airports | WHERE country == "United Kingdom" -| STATS centroid = ST_CENTROID(location), count=COUNT() +| STATS centroid = ST_CENTROID_AGG(location), count=COUNT() | EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL centroid_in_iceland = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) | EVAL centroid_within_uk = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -450,7 +450,7 @@ FROM airports | EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) | EVAL within_uk = ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL within_iceland = ST_WITHIN(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland +| STATS centroid = ST_CENTROID_AGG(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland | SORT count ASC ; @@ -465,7 +465,7 @@ required_feature: esql.st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -477,7 +477,7 @@ required_feature: esql.st_intersects FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -598,7 +598,7 @@ required_feature: esql.st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), 
count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -683,7 +683,7 @@ required_feature: esql.st_disjoint FROM airports_mp | WHERE ST_DISJOINT(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -776,7 +776,7 @@ required_feature: esql.st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -863,7 +863,7 @@ required_feature: esql.st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -963,46 +963,46 @@ wkt:keyword |pt:cartesian_point ; centroidCartesianFromStringNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg row wkt = "POINT(4297.10986328125 -1475.530029296875)" -| STATS c = ST_CENTROID(TO_CARTESIANPOINT(wkt)); +| STATS c = ST_CENTROID_AGG(TO_CARTESIANPOINT(wkt)); c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString1 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString2 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(5939.02001953125 398.6199951171875) ; centroidFromCartesianString3 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(3949.163965353159 1078.2645465797348) @@ -1039,30 +1039,30 @@ ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l # Tests for ST_CENTROID on CARTESIAN_POINT type cartesianCentroidFromAirports -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(location); +| STATS centroid=ST_CENTROID_AGG(location); centroid:cartesian_point POINT(-266681.67563861894 3053301.5120195406) ; cartesianCentroidFromAirportsNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(TO_CARTESIANPOINT(location)); +| STATS centroid=ST_CENTROID_AGG(TO_CARTESIANPOINT(location)); centroid:cartesian_point POINT (-266681.66530554957 3053301.506061676) ; cartesianCentroidFromAirportsCount 
-required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1070,10 +1070,10 @@ POINT(-266681.67563861894 3053301.5120195406) | 849 ; cartesianCentroidFromAirportsCountGrouped -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank | SORT scalerank DESC ; @@ -1089,11 +1089,11 @@ POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; cartesianCentroidFromAirportsFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web | WHERE scalerank == 9 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1101,13 +1101,13 @@ POINT(9289013.153846154 3615537.0533353365) | 26 ; cartesianCentroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1115,11 +1115,11 @@ POINT(9003597.4375 3429344.0078125) | 8 ; cartesianCentroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank -| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) ; centroid:cartesian_point | count:long @@ -1134,7 +1134,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1233,7 +1233,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1245,7 +1245,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1402,7 +1402,7 @@ required_feature: esql.st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1439,7 +1439,7 @@ required_feature: esql.st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 
4800000 1700000, 4700000 1700000, 4700000 1600000))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1528,7 +1528,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1540,7 +1540,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1555,7 +1555,7 @@ required_feature: esql.st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1654,7 +1654,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long @@ -1666,7 +1666,7 @@ required_feature: esql.st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:cartesian_point | count:long diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 62688d753aeef..037b76801ca75 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -189,7 +189,7 @@ private FunctionDefinition[][] functions() { def(Now.class, Now::new, "now") }, // spatial new FunctionDefinition[] { - def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), + def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid_agg"), def(SpatialContains.class, SpatialContains::new, "st_contains"), def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 31c967fc3eee8..192c011c4494b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -68,7 +68,7 @@ public class EsqlFeatures implements FeatureSpecification { /** * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269. 
*/ - private static final NodeFeature ST_CENTROID = new NodeFeature("esql.st_centroid"); + private static final NodeFeature ST_CENTROID_AGG = new NodeFeature("esql.st_centroid_agg"); /** * Support for spatial aggregation {@code ST_INTERSECTS}. Done in #104907. @@ -111,7 +111,7 @@ public Set getFeatures() { FROM_OPTIONS, SPATIAL_POINTS_FROM_SOURCE, SPATIAL_SHAPES, - ST_CENTROID, + ST_CENTROID_AGG, ST_INTERSECTS, ST_CONTAINS_WITHIN, ST_DISJOINT diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7de3308fcab16..32e46ee544d07 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3077,7 +3077,7 @@ public void testIsNotNullConstraintForAliasedExpressions() { public void testSpatialTypesAndStatsUseDocValues() { var plan = planAirports(""" from test - | stats centroid = st_centroid(location) + | stats centroid = st_centroid_agg(location) """); var limit = as(plan, Limit.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index fb2362851e43c..f71161d64e130 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -2286,7 +2286,7 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { public void testSpatialTypesAndStatsUseDocValues() { var plan = this.physicalPlan(""" from airports - | stats centroid = st_centroid(location) + | stats centroid = st_centroid_agg(location) """, airports); var limit = as(plan, LimitExec.class); @@ -2343,7 +2343,7 @@ public void testSpatialTypesAndStatsUseDocValues() { public void testSpatialTypesAndStatsUseDocValuesNested() { var plan = this.physicalPlan(""" from airports - | stats centroid = st_centroid(to_geopoint(location)) + | stats centroid = st_centroid_agg(to_geopoint(location)) """, airports); var limit = as(plan, LimitExec.class); @@ -2404,7 +2404,7 @@ public void testSpatialTypesAndStatsUseDocValuesNested() { public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { var plan = this.physicalPlan(""" row wkt = "POINT(42.97109629958868 14.7552534006536)" - | stats centroid = st_centroid(to_geopoint(wkt)) + | stats centroid = st_centroid_agg(to_geopoint(wkt)) """, airports); var limit = as(plan, LimitExec.class); @@ -2458,7 +2458,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { var plan = this.physicalPlan(""" from airports - | stats centroid = st_centroid(location), count = COUNT() + | stats centroid = st_centroid_agg(location), count = COUNT() """, airports); var limit = as(plan, LimitExec.class); @@ -2524,7 +2524,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { var plan = this.physicalPlan(""" FROM airports - | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() + | STATS airports=ST_CENTROID_AGG(location), 
cities=ST_CENTROID_AGG(city_location), count=COUNT() """, airports); var limit = as(plan, LimitExec.class); @@ -2590,7 +2590,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { var plan = this.physicalPlan(""" FROM airports | WHERE scalerank == 9 - | STATS centroid=ST_CENTROID(location), count=COUNT() + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() """, airports); var limit = as(plan, LimitExec.class); @@ -2657,7 +2657,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { var plan = this.physicalPlan(""" FROM airports - | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank """, airports); var limit = as(plan, LimitExec.class); @@ -2727,8 +2727,8 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregated() { var plan = this.physicalPlan(""" FROM airports - | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank - | STATS centroid=ST_CENTROID(centroid), count=SUM(count) + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank + | STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) """, airports); var limit = as(plan, LimitExec.class); @@ -2821,7 +2821,7 @@ public void testEnrichBeforeSpatialAggregationSupportsDocValues() { var plan = physicalPlan(""" from airports | enrich city_boundaries ON city_location WITH airport, region, city_boundary - | stats centroid = st_centroid(city_location) + | stats centroid = st_centroid_agg(city_location) """, airports); var limit = as(plan, LimitExec.class); @@ -3049,7 +3049,7 @@ public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) }; for (TestSpatialRelation test : tests) { - var centroidExpr = "centroid=ST_CENTROID(location), count=COUNT()"; + var centroidExpr = "centroid=ST_CENTROID_AGG(location), count=COUNT()"; var plan = this.physicalPlan( "FROM " + test.index.index.name() + " | WHERE " + test.predicate() + " | STATS " + centroidExpr, test.index @@ -3152,11 +3152,11 @@ public void testPushSpatialIntersectsStringToSourceAndUseDocValuesForCentroid() for (String query : new String[] { """ FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) - | STATS centroid=ST_CENTROID(location), count=COUNT() + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() """, """ FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) - | STATS centroid=ST_CENTROID(location), count=COUNT() + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() """ }) { var plan = this.physicalPlan(query, airports); @@ -3253,13 +3253,13 @@ public void testPushSpatialIntersectsStringToSourceCompoundPredicateAndUseDocVal | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND type == "mid" - | STATS centroid=ST_CENTROID(location), count=COUNT() + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() """, """ FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) AND type == "mid" - | STATS 
centroid=ST_CENTROID(location), count=COUNT() + | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() """ }) { var plan = this.physicalPlan(query, airports); @@ -3340,7 +3340,7 @@ public void testIntersectsOnTwoPointFieldAndBothCentroidUsesDocValues() { String query = """ FROM airports | WHERE ST_INTERSECTS(location, city_location) - | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + | STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() """; var plan = this.physicalPlan(query, airports); @@ -3383,11 +3383,11 @@ public void testIntersectsOnTwoPointFieldAndOneCentroidUsesDocValues() { for (String query : new String[] { """ FROM airports | WHERE ST_INTERSECTS(location, city_location) - | STATS location=ST_CENTROID(location), count=COUNT() + | STATS location=ST_CENTROID_AGG(location), count=COUNT() """, """ FROM airports | WHERE ST_INTERSECTS(location, city_location) - | STATS city_location=ST_CENTROID(city_location), count=COUNT() + | STATS city_location=ST_CENTROID_AGG(city_location), count=COUNT() """ }) { var plan = this.physicalPlan(query, airports); @@ -3430,7 +3430,7 @@ public void testTwoIntersectsWithTwoCentroidsUsesDocValues() { FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND ST_INTERSECTS(city_location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) - | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + | STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() """; var plan = this.physicalPlan(query, airports); From afb492272a06431b3a0c7f8785fe7d10e114ca3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 10 Apr 2024 17:57:18 +0200 Subject: [PATCH 232/264] [DOCS] Adds HuggingFace example to inference API tutorial (#107298) --- .../inference/put-inference.asciidoc | 15 +++- .../semantic-search-inference.asciidoc | 12 +-- .../infer-api-ingest-pipeline-widget.asciidoc | 17 +++++ .../infer-api-ingest-pipeline.asciidoc | 25 +++++++ .../infer-api-mapping-widget.asciidoc | 16 ++++ .../inference-api/infer-api-mapping.asciidoc | 31 ++++++++ .../infer-api-reindex-widget.asciidoc | 17 +++++ .../inference-api/infer-api-reindex.asciidoc | 23 ++++++ .../infer-api-requirements-widget.asciidoc | 17 +++++ .../infer-api-requirements.asciidoc | 6 ++ .../infer-api-search-widget.asciidoc | 17 +++++ .../inference-api/infer-api-search.asciidoc | 73 +++++++++++++++++++ .../infer-api-task-widget.asciidoc | 17 +++++ .../inference-api/infer-api-task.asciidoc | 30 ++++++++ 14 files changed, 310 insertions(+), 6 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 7d0ede82f70fa..9f7f6384a7389 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -346,7 +346,7 @@ Example response: ===== Hugging Face service The following example shows how to create an {infer} endpoint called -`hugging-face_embeddings` to perform a `text_embedding` task type. +`hugging-face-embeddings` to perform a `text_embedding` task type. [source,console] ------------------------------------------------------------ @@ -371,6 +371,19 @@ endpoint URL. Select the model you want to use on the new endpoint creation page task under the Advanced configuration section. Create the endpoint. 
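Editor's note: for readers driving this API from Java rather than the Dev Tools console, a rough equivalent of the endpoint-creation request is sketched below using the JDK's built-in HTTP client. The localhost address, the placeholder token and URL, and the exact `service_settings` body are assumptions that mirror the fields this page describes, and the path follows the `PUT _inference/<task_type>/<inference_id>` form used by the console example above; treat it as an illustration, not an authoritative client.

[source,java]
----
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateHuggingFaceEndpoint {
    public static void main(String[] args) throws Exception {
        // Placeholder values: substitute a real Hugging Face access token and
        // the URL copied from the Hugging Face endpoint creation page.
        String body = """
            {
              "service": "hugging_face",
              "service_settings": {
                "api_key": "<access-token>",
                "url": "<endpoint-url>"
              }
            }""";
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:9200/_inference/text_embedding/hugging-face-embeddings"))
            .header("Content-Type", "application/json")
            .PUT(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
----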
Copy the URL after the endpoint initialization has been finished. +[discrete] +[[inference-example-hugging-face-supported-models]] +The list of supported models for the Hugging Face service: + +* https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2[all-MiniLM-L6-v2] +* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[all-MiniLM-L12-v2] +* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[all-mpnet-base-v2] +* https://huggingface.co/intfloat/e5-base-v2[e5-base-v2] +* https://huggingface.co/intfloat/e5-small-v2[e5-small-v2] +* https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base] +* https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small] + + [discrete] [[inference-example-eland]] ===== Models uploaded by Eland via the elasticsearch service diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index b5619f8dda7b9..53abf0f0458af 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -6,9 +6,11 @@ The instructions in this tutorial shows you how to use the {infer} API with various services to perform semantic search on your data. The following examples -use Cohere's `embed-english-v3.0` model and OpenAI's `text-embedding-ada-002` -second generation embedding model. You can use any Cohere and OpenAI models, -they are all supported by the {infer} API. +use Cohere's `embed-english-v3.0` model, the `all-mpnet-base-v2` model from +HuggingFace, and OpenAI's `text-embedding-ada-002` second generation embedding +model. You can use any Cohere and OpenAI models, they are all supported by the +{infer} API. For a list of supported models available on HuggingFace, refer to +<>. Click the name of the service you want to use on any of the widgets below to review the corresponding instructions. @@ -91,7 +93,7 @@ GET _tasks/ // TEST[skip:TBD] You can also cancel the reindexing process if you don't want to wait until the -reindexing process is fully complete which might take hours: +reindexing process is fully complete which might take hours for large data sets: [source,console] ---- @@ -104,7 +106,7 @@ POST _tasks//_cancel [[infer-semantic-search]] ==== Semantic search -After the dataset has been enriched with the embeddings, you can query the data +After the data set has been enriched with the embeddings, you can query the data using {ref}/knn-search.html#knn-semantic-search[semantic search]. Pass a `query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and provide the query text and the model you have used to create the embeddings. diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc index 44d2f60966caa..069dcb61f81b0 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc @@ -7,6 +7,12 @@ id="infer-api-ingest-cohere"> Cohere + + + + + +
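Editor's note: circling back to the version-bump commit earlier in this series (patch 230), the rows it appends to `TransportVersions.csv` and `IndexVersions.csv` (`7.17.20,7172099`) and the new constant `V_7_17_21 = new Version(7_17_21_99)` share the same packed-integer layout. The sketch below reconstructs that arithmetic purely from the values visible in the diff, assuming two digits each for the minor and revision fields plus the fixed `99` suffix seen on release versions; it illustrates the id layout and is not a copy of the real `Version` class.

[source,java]
----
// Reconstructs the packed ids visible in the CSV rows: 7.17.20 -> 7172099.
public class VersionIdSketch {
    static int encode(int major, int minor, int revision) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + 99;
    }

    public static void main(String[] args) {
        System.out.println(encode(7, 17, 20)); // 7172099, the appended CSV row
        System.out.println(encode(7, 17, 21)); // 7172199, i.e. the literal 7_17_21_99
    }
}
----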