From 6e3608992c1d8fc28b041fda5777532e7bad9251 Mon Sep 17 00:00:00 2001 From: Joe Reuter Date: Wed, 3 Apr 2024 17:04:24 +0200 Subject: [PATCH 001/173] Add non-indexed fields to ecs templates (#106714) * add non-indexed fields to ecs templates * update * Update docs/changelog/106714.yaml * Adjusting ECS tests to verify proper index and doc_values settings * review comment --------- Co-authored-by: eyalkoren <41850454+eyalkoren@users.noreply.github.com> --- docs/changelog/106714.yaml | 5 ++ .../src/main/resources/ecs@mappings.json | 24 ++++++ .../xpack/stack/EcsDynamicTemplatesIT.java | 78 ++++++++++++------- .../xpack/stack/StackTemplateRegistry.java | 2 +- 4 files changed, 81 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/106714.yaml diff --git a/docs/changelog/106714.yaml b/docs/changelog/106714.yaml new file mode 100644 index 0000000000000..65b0acd77d764 --- /dev/null +++ b/docs/changelog/106714.yaml @@ -0,0 +1,5 @@ +pr: 106714 +summary: Add non-indexed fields to ecs templates +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index 7eaf37ba1d95e..3eae6c1fa4f5a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -23,6 +23,30 @@ "unmatch_mapping_type": "object" } }, + { + "ecs_non_indexed_keyword": { + "mapping": { + "type": "keyword", + "index": false, + "doc_values": false + }, + "path_match": [ + "event.original" + ] + } + }, + { + "ecs_non_indexed_long": { + "mapping": { + "type": "long", + "index": false, + "doc_values": false + }, + "path_match": [ + "*.x509.public_key_exponent" + ] + } + }, { "ecs_ip": { "mapping": { diff --git a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java index 09e9a6090c485..8bdf7b30a9997 100644 --- a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java +++ b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java @@ -191,6 +191,11 @@ public void testNumericMessage() throws IOException { verifyEcsMappings(indexName); } + private void assertType(String expectedType, Map actualMappings) throws IOException { + assertNotNull("expected to get non-null mappings for field", actualMappings); + assertEquals(expectedType, actualMappings.get("type")); + } + public void testUsage() throws IOException { String indexName = "test-usage"; createTestIndex(indexName); @@ -205,13 +210,13 @@ public void testUsage() throws IOException { indexDocument(indexName, fieldsMap); final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, new HashMap<>(), ""); - assertEquals("scaled_float", flatFieldMappings.get("host.cpu.usage")); - assertEquals("scaled_float", flatFieldMappings.get("string.usage")); - assertEquals("long", flatFieldMappings.get("usage")); - assertEquals("long", flatFieldMappings.get("root.usage.long")); - assertEquals("float", flatFieldMappings.get("root.usage.float")); + assertType("scaled_float", flatFieldMappings.get("host.cpu.usage")); + assertType("scaled_float", 
flatFieldMappings.get("string.usage")); + assertType("long", flatFieldMappings.get("usage")); + assertType("long", flatFieldMappings.get("root.usage.long")); + assertType("float", flatFieldMappings.get("root.usage.float")); } public void testOnlyMatchLeafFields() throws IOException { @@ -230,16 +235,16 @@ public void testOnlyMatchLeafFields() throws IOException { indexDocument(indexName, fieldsMap); final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, new HashMap<>(), ""); - assertEquals("long", flatFieldMappings.get("foo.message.bar")); - assertEquals("long", flatFieldMappings.get("foo.url.path.bar")); - assertEquals("long", flatFieldMappings.get("foo.url.full.bar")); - assertEquals("long", flatFieldMappings.get("foo.stack_trace.bar")); - assertEquals("long", flatFieldMappings.get("foo.user_agent.original.bar")); - assertEquals("long", flatFieldMappings.get("foo.created.bar")); - assertEquals("float", flatFieldMappings.get("foo._score.bar")); - assertEquals("long", flatFieldMappings.get("foo.structured_data")); + assertType("long", flatFieldMappings.get("foo.message.bar")); + assertType("long", flatFieldMappings.get("foo.url.path.bar")); + assertType("long", flatFieldMappings.get("foo.url.full.bar")); + assertType("long", flatFieldMappings.get("foo.stack_trace.bar")); + assertType("long", flatFieldMappings.get("foo.user_agent.original.bar")); + assertType("long", flatFieldMappings.get("foo.created.bar")); + assertType("float", flatFieldMappings.get("foo._score.bar")); + assertType("long", flatFieldMappings.get("foo.structured_data")); } private static void indexDocument(String indexName, Map flattenedFieldsMap) throws IOException { @@ -364,28 +369,26 @@ private Map getMappings(String indexName) throws IOException { private void processRawMappingsSubtree( final Map fieldSubtrees, - final Map flatFieldMappings, - final Map flatMultiFieldsMappings, + final Map> flatFieldMappings, + final Map> flatMultiFieldsMappings, final String subtreePrefix ) { fieldSubtrees.forEach((fieldName, fieldMappings) -> { String fieldFullPath = subtreePrefix + fieldName; Map fieldMappingsMap = ((Map) fieldMappings); - String type = (String) fieldMappingsMap.get("type"); - if (type != null) { - flatFieldMappings.put(fieldFullPath, type); + if (fieldMappingsMap.get("type") != null) { + flatFieldMappings.put(fieldFullPath, fieldMappingsMap); } Map subfields = (Map) fieldMappingsMap.get("properties"); if (subfields != null) { processRawMappingsSubtree(subfields, flatFieldMappings, flatMultiFieldsMappings, fieldFullPath + "."); } - Map> fields = (Map>) fieldMappingsMap.get("fields"); + Map> fields = (Map>) fieldMappingsMap.get("fields"); if (fields != null) { fields.forEach((subFieldName, multiFieldMappings) -> { String subFieldFullPath = fieldFullPath + "." 
+ subFieldName; - String subFieldType = Objects.requireNonNull(multiFieldMappings.get("type")); - flatMultiFieldsMappings.put(subFieldFullPath, subFieldType); + flatMultiFieldsMappings.put(subFieldFullPath, multiFieldMappings); }); } }); @@ -393,34 +396,44 @@ private void processRawMappingsSubtree( private void verifyEcsMappings(String indexName) throws IOException { final Map rawMappings = getMappings(indexName); - final Map flatFieldMappings = new HashMap<>(); - final Map flatMultiFieldsMappings = new HashMap<>(); + final Map> flatFieldMappings = new HashMap<>(); + final Map> flatMultiFieldsMappings = new HashMap<>(); processRawMappingsSubtree(rawMappings, flatFieldMappings, flatMultiFieldsMappings, ""); Map> shallowFieldMapCopy = new HashMap<>(ecsFlatFieldDefinitions); logger.info("Testing mapping of {} ECS fields", shallowFieldMapCopy.size()); List nonEcsFields = new ArrayList<>(); Map fieldToWrongMappingType = new HashMap<>(); - flatFieldMappings.forEach((fieldName, actualMappingType) -> { + List wronglyIndexedFields = new ArrayList<>(); + List wronglyDocValuedFields = new ArrayList<>(); + flatFieldMappings.forEach((fieldName, actualMappings) -> { Map expectedMappings = shallowFieldMapCopy.remove(fieldName); if (expectedMappings == null) { nonEcsFields.add(fieldName); } else { String expectedType = (String) expectedMappings.get("type"); + String actualMappingType = (String) actualMappings.get("type"); if (actualMappingType.equals(expectedType) == false) { fieldToWrongMappingType.put(fieldName, actualMappingType); } + if (expectedMappings.get("index") != actualMappings.get("index")) { + wronglyIndexedFields.add(fieldName); + } + if (expectedMappings.get("doc_values") != actualMappings.get("doc_values")) { + wronglyDocValuedFields.add(fieldName); + } } }); Map shallowMultiFieldMapCopy = new HashMap<>(ecsFlatMultiFieldDefinitions); logger.info("Testing mapping of {} ECS multi-fields", shallowMultiFieldMapCopy.size()); - flatMultiFieldsMappings.forEach((fieldName, actualMappingType) -> { + flatMultiFieldsMappings.forEach((fieldName, actualMappings) -> { String expectedType = shallowMultiFieldMapCopy.remove(fieldName); if (expectedType != null) { // not finding an entry in the expected multi-field mappings map is acceptable: our dynamic templates are required to // ensure multi-field mapping for all fields with such ECS definitions. 
However, the patterns in these templates may lead // to multi-field mapping for ECS fields for which such are not defined + String actualMappingType = (String) actualMappings.get("type"); if (actualMappingType.equals(expectedType) == false) { fieldToWrongMappingType.put(fieldName, actualMappingType); } @@ -457,6 +470,8 @@ private void verifyEcsMappings(String indexName) throws IOException { ); }); nonEcsFields.forEach(field -> logger.error("The test document contains '{}', which is not an ECS field", field)); + wronglyIndexedFields.forEach(fieldName -> logger.error("ECS field '{}' should be mapped with \"index: false\"", fieldName)); + wronglyDocValuedFields.forEach(fieldName -> logger.error("ECS field '{}' should be mapped with \"doc_values: false\"", fieldName)); assertTrue("ECS is not fully covered by the current ECS dynamic templates, see details above", shallowFieldMapCopy.isEmpty()); assertTrue( @@ -468,5 +483,14 @@ private void verifyEcsMappings(String indexName) throws IOException { fieldToWrongMappingType.isEmpty() ); assertTrue("The test document contains non-ECS fields, see details above", nonEcsFields.isEmpty()); + assertTrue( + "At least one field was not mapped with \"index: false\" as it should according to its ECS definitions, see details above", + wronglyIndexedFields.isEmpty() + ); + assertTrue( + "At least one field was not mapped with \"doc_values: false\" as it should according to its ECS definitions, see " + + "details above", + wronglyDocValuedFields.isEmpty() + ); } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index b21e8c0c15811..3930cfe6cd941 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 8; + public static final int REGISTRY_VERSION = 9; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( From 755226df23066b0bcd70a06a6ae6e6f340daddd6 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 3 Apr 2024 18:09:17 +0300 Subject: [PATCH 002/173] [TEST] Add full cluster restart test for downsampling (#107053) This is a copy of the test added for rolling upgrade in #107036 --- .../FullClusterRestartDownsampleIT.java | 284 ++++++++++++++++++ 1 file changed, 284 insertions(+) create mode 100644 qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java new file mode 100644 index 0000000000000..b171c6e6f0358 --- /dev/null +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -0,0 +1,284 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; + +public class FullClusterRestartDownsampleIT extends ParameterizedFullClusterRestartTestCase { + + private static final String FIXED_INTERVAL = "1h"; + private String index; + private String policy; + private String dataStream; + + private static TemporaryFolder repoDirectory = new TemporaryFolder(); + + protected static LocalClusterConfigProvider clusterConfig = c -> {}; + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(2) + .setting("xpack.security.enabled", "false") + .setting("indices.lifecycle.poll_interval", "5s") + .apply(() -> clusterConfig) + .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + public FullClusterRestartDownsampleIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { + super(upgradeStatus); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } + + private static final String POLICY = """ + { + "policy": { + "phases": { + "hot": { + "actions": { + "rollover" : { + "max_age": "30s" + }, + "downsample": { + "fixed_interval": "$interval" + } + } + } + } + } + } + """; + + private static final String TEMPLATE = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "time_series": { + "start_time": "2010-01-01T00:00:00.000Z", + "end_time": "2022-01-01T00:00:00.000Z" + }, + "routing_path": ["metricset"], + "mode": "time_series", + "look_ahead_time": "1m", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + "time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String TEMPLATE_NO_TIME_BOUNDARIES = """ + { + "index_patterns": ["%s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 1, + "routing_path": ["metricset"], + "mode": "time_series", + "lifecycle.name": "%s" + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword", + 
"time_series_dimension": true + }, + "volume": { + "type": "double", + "time_series_metric": "gauge" + } + } + } + }, + "data_stream": { } + }"""; + + private static final String BULK = """ + {"create": {}} + {"@timestamp": "2020-01-01T05:10:00Z", "metricset": "pod", "volume" : 10} + {"create": {}} + {"@timestamp": "2020-01-01T05:20:00Z", "metricset": "pod", "volume" : 20} + {"create": {}} + {"@timestamp": "2020-01-01T05:30:00Z", "metricset": "pod", "volume" : 30} + {"create": {}} + {"@timestamp": "2020-01-01T05:40:00Z", "metricset": "pod", "volume" : 40} + {"create": {}} + {"@timestamp": "2020-01-01T06:10:00Z", "metricset": "pod", "volume" : 50} + {"create": {}} + {"@timestamp": "2020-01-01T07:10:00Z", "metricset": "pod", "volume" : 60} + {"create": {}} + {"@timestamp": "2020-01-01T09:10:00Z", "metricset": "pod", "volume" : 70} + {"create": {}} + {"@timestamp": "2020-01-01T09:20:00Z", "metricset": "pod", "volume" : 80} + """; + + @Before + public void refreshAbstractions() { + policy = "policy-" + randomAlphaOfLength(5); + dataStream = "ds-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + index = ".ds-" + dataStream; + logger.info("--> running [{}] with index [{}], data stream [{}], and policy [{}]", getTestName(), index, dataStream, policy); + } + + private void createIndex() throws IOException { + var putIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE, dataStream, policy)); + assertOK(client().performRequest(putIndexTemplateRequest)); + } + + private void bulk() throws IOException { + var bulkRequest = new Request("POST", "/" + dataStream + "/_bulk"); + bulkRequest.setJsonEntity(BULK); + bulkRequest.addParameter("refresh", "true"); + var response = client().performRequest(bulkRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat("errors in response:\n " + responseBody, responseBody.get("errors"), equalTo(false)); + } + + private void createIlmPolicy() throws IOException { + Request request = new Request("PUT", "_ilm/policy/" + policy); + request.setJsonEntity(POLICY.replace("$interval", FIXED_INTERVAL)); + client().performRequest(request); + } + + private void startDownsampling() throws Exception { + // Update template to not contain time boundaries anymore (rollover is blocked otherwise due to index time + // boundaries overlapping after rollover) + Request updateIndexTemplateRequest = new Request("POST", "/_index_template/1"); + updateIndexTemplateRequest.setJsonEntity(Strings.format(TEMPLATE_NO_TIME_BOUNDARIES, dataStream, policy)); + assertOK(client().performRequest(updateIndexTemplateRequest)); + + // Manual rollover the original index such that it's not the write index in the data stream anymore + Request rolloverRequest = new Request("POST", "/" + dataStream + "/_rollover"); + rolloverRequest.setJsonEntity(""" + { + "conditions": { + "max_docs": "1" + } + }"""); + client().performRequest(rolloverRequest); + logger.info("rollover complete"); + } + + private void runQuery() throws Exception { + String rollup = waitAndGetRollupIndexName(); + assertFalse(rollup.isEmpty()); + + // Retry until the downsample index is populated. 
+ assertBusy(() -> { + Request request = new Request("POST", "/" + dataStream + "/_search"); + var map = entityAsMap(client().performRequest(request)); + var hits = (List) ((Map) map.get("hits")).get("hits"); + assertEquals(4, hits.size()); + for (var hit : hits) { + assertEquals(rollup, ((Map) hit).get("_index")); + } + }, 30, TimeUnit.SECONDS); + } + + private String waitAndGetRollupIndexName() throws InterruptedException, IOException { + final String[] rollupIndexName = new String[1]; + waitUntil(() -> { + try { + rollupIndexName[0] = getRollupIndexName(); + return rollupIndexName[0] != null; + } catch (IOException e) { + return false; + } + }, 120, TimeUnit.SECONDS); + if (rollupIndexName[0] == null) { + logger.warn("--> rollup index name is NULL"); + } else { + logger.info("--> original index name is [{}], rollup index name is [{}]", index, rollupIndexName[0]); + } + return rollupIndexName[0]; + } + + private String getRollupIndexName() throws IOException { + String endpoint = "/downsample-" + FIXED_INTERVAL + "-" + index + "-*/?expand_wildcards=all"; + Response response = client().performRequest(new Request("GET", endpoint)); + Map asMap = responseAsMap(response); + if (asMap.size() == 1) { + return (String) asMap.keySet().toArray()[0]; + } + logger.warn("--> No matching rollup name for path [%s]", endpoint); + return null; + } + + public void testRollupIndex() throws Exception { + assumeTrue( + "Downsample got many stability improvements in 8.10.0", + oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE) + ); + if (isRunningAgainstOldCluster()) { + createIlmPolicy(); + createIndex(); + bulk(); + startDownsampling(); + } else { + runQuery(); + } + } +} From 36bcb6b3989a0bc54c8a0d4a68c633c977dd092c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 3 Apr 2024 18:33:14 +0300 Subject: [PATCH 003/173] Query API Keys support for both `aggs` and `aggregations` keywords (#107054) The Query API Key Information endpoint supports aggs since #104895. But some lang clients actually use the `aggregations` keyword in requests, as the preferred synonym to `aggs`. This PR adds support for the `aggregations` request keyword as a synonym for the existing `aggs` term. Closes #106839 --- docs/changelog/107054.yaml | 6 ++ .../rest-api/security/query-api-key.asciidoc | 2 +- .../xpack/security/ApiKeyAggsIT.java | 8 +-- .../action/apikey/RestQueryApiKeyAction.java | 28 +++++--- .../apikey/RestQueryApiKeyActionTests.java | 66 +++++++++++++++++++ 5 files changed, 96 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/107054.yaml diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml new file mode 100644 index 0000000000000..6511cb5185492 --- /dev/null +++ b/docs/changelog/107054.yaml @@ -0,0 +1,6 @@ +pr: 107054 +summary: Query API Keys support for both `aggs` and `aggregations` keywords +area: Security +type: enhancement +issues: + - 106839 diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index 1888a110e072f..ad4184ec34a29 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -232,7 +232,7 @@ simply mentioning `metadata` (not followed by any dot and sub-field name). NOTE: You cannot query the role descriptors of an API key. ==== -`aggs`:: +`aggs` or `aggregations`:: (Optional, object) Any <> to run over the corpus of returned API keys. Aggregations and queries work together. 
Aggregations are computed only on the API keys that match the query. This supports only a subset of aggregation types, namely: <>, diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java index 427d918fd64d5..f9d5c42affcf0 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java @@ -98,7 +98,7 @@ public void testFiltersAggs() throws IOException { // other bucket assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { - "aggs": { + "aggregations": { "only_user_keys": { "filters": { "other_bucket_key": "other_user_keys", @@ -267,7 +267,7 @@ public void testFiltersAggs() throws IOException { "good-api-key-invalidated": { "term": {"invalidated": false}} } }, - "aggs": { + "aggregations": { "wrong-field": { "filters": { "filters": { @@ -487,7 +487,7 @@ public void testFilterAggs() throws IOException { { "usernames": { "terms": { "field": "username" } } } ] }, - "aggs": { + "aggregations": { "not_expired": { "filter": { "range": { @@ -564,7 +564,7 @@ public void testDisallowedAggTypes() { ); request.setJsonEntity(""" { - "aggs": { + "aggregations": { "all_.security_docs": { "global": {}, "aggs": { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java index 77c2a080dbb57..59992e42d88d5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java @@ -36,6 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.search.aggregations.AggregatorFactories.parseAggregators; +import static org.elasticsearch.search.builder.SearchSourceBuilder.AGGREGATIONS_FIELD; +import static org.elasticsearch.search.builder.SearchSourceBuilder.AGGS_FIELD; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** @@ -47,19 +49,27 @@ public final class RestQueryApiKeyAction extends ApiKeyBaseRestHandler { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "query_api_key_request_payload", - a -> new Payload( - (QueryBuilder) a[0], - (AggregatorFactories.Builder) a[1], - (Integer) a[2], - (Integer) a[3], - (List) a[4], - (SearchAfterBuilder) a[5] - ) + a -> { + if (a[1] != null && a[2] != null) { + throw new IllegalArgumentException("Duplicate 'aggs' or 'aggregations' field"); + } else { + return new Payload( + (QueryBuilder) a[0], + (AggregatorFactories.Builder) (a[1] != null ? 
a[1] : a[2]), + (Integer) a[3], + (Integer) a[4], + (List) a[5], + (SearchAfterBuilder) a[6] + ); + } + } ); static { PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseTopLevelQuery(p), new ParseField("query")); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), new ParseField("aggs")); + // only one of aggs or aggregations is allowed + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), AGGREGATIONS_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAggregators(p), AGGS_FIELD); PARSER.declareInt(optionalConstructorArg(), new ParseField("from")); PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 74d1203fd52ec..2240b72c1a963 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -35,6 +36,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; @@ -48,6 +50,7 @@ import java.util.Map; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -145,6 +148,69 @@ public void doE assertNotNull(responseSetOnce.get()); } + public void testAggsAndAggregationsTogether() { + String agg1; + String agg2; + if (randomBoolean()) { + agg1 = "aggs"; + agg2 = "aggregations"; + } else { + agg1 = "aggregations"; + agg2 = "aggs"; + } + final String requestBody = Strings.format(""" + { + "%s": { + "all_keys_by_type": { + "composite": { + "sources": [ + { "type": { "terms": { "field": "type" } } } + ] + } + } + }, + "%s": { + "type_cardinality": { + "cardinality": { + "field": "type" + } + } + } + }""", agg1, agg2); + + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(requestBody), + XContentType.JSON + ).build(); + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + 
fail("TEST failed, request parsing should've failed"); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); + } + }; + RestQueryApiKeyAction restQueryApiKeyAction = new RestQueryApiKeyAction(Settings.EMPTY, mockLicenseState); + XContentParseException ex = expectThrows( + XContentParseException.class, + () -> restQueryApiKeyAction.handleRequest(restRequest, restChannel, client) + ); + assertThat(ex.getCause().getMessage(), containsString("Duplicate 'aggs' or 'aggregations' field")); + assertThat(ex.getMessage(), containsString("Failed to build [query_api_key_request_payload]")); + assertNull(responseSetOnce.get()); + } + public void testParsingSearchParameters() throws Exception { final String requestBody = """ { From 245d69d9e6cc1340fdd4544ab909d06d4d11ddc6 Mon Sep 17 00:00:00 2001 From: Ash <1849116+ashokaditya@users.noreply.github.com> Date: Wed, 3 Apr 2024 17:53:38 +0200 Subject: [PATCH 004/173] [Security Solution] Add `read` permission for third party agent indices for `kibana_system` (#107046) * add read permission for third party agent indices Allow `read` privilege for `kibana_system` role on `logs-sentinel_one*` and `logs-crowdstrike*` index patterns closes elastic/security-team/issues/9046 * Update docs/changelog/107046.yaml * review changes --- docs/changelog/107046.yaml | 6 ++++ .../KibanaOwnedReservedRoleDescriptors.java | 6 ++++ .../authz/store/ReservedRolesStoreTests.java | 31 +++++++++++++++++++ 3 files changed, 43 insertions(+) create mode 100644 docs/changelog/107046.yaml diff --git a/docs/changelog/107046.yaml b/docs/changelog/107046.yaml new file mode 100644 index 0000000000000..6c1373e09d17c --- /dev/null +++ b/docs/changelog/107046.yaml @@ -0,0 +1,6 @@ +pr: 107046 +summary: "[Security Solution] Add `read` permission for third party agent indices\ + \ for `kibana_system`" +area: Authorization +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 3c76734b794d8..cdb7f44d41e4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -272,6 +272,12 @@ static RoleDescriptor kibanaSystem(String name) { .indices(".logs-osquery_manager.actions-*") .privileges("auto_configure", "create_index", "read", "index", "write", "delete") .build(), + + // Third party agent (that use non-Elastic Defend integrations) info logs indices. + // Kibana reads from these to display agent status/info to the user. + // These are indices that filebeat writes to, and the data in these indices are ingested by Fleet integrations + // in order to provide support for response actions related to malicious events for such agents. 
+ RoleDescriptor.IndicesPrivileges.builder().indices("logs-sentinel_one.*", "logs-crowdstrike.*").privileges("read").build(), // For ILM policy for APM, Endpoint, & Synthetics packages that have delete action RoleDescriptor.IndicesPrivileges.builder() .indices( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index b0d25949947e3..39a94e4a2f0bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -993,6 +993,37 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); }); + // Tests for third-party agent indices that `kibana_system` has only `read` access + Arrays.asList( + "logs-sentinel_one." + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-crowdstrike." + randomAlphaOfLength(randomIntBetween(0, 13)) + ).forEach((index) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); + // Index for Endpoint specific actions Arrays.asList(".logs-endpoint.actions-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); From 2f33b012ba508c6c46137f69bcc102f1b08adced Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 3 Apr 2024 12:08:48 -0400 Subject: [PATCH 005/173] Split the mappings for failure stores out of the index template service. 
(#107025) This will help us keep failure store specific configurations in one place, and hopefully make it easier to evolve the schema in the future. --- .../DataStreamFailureStoreDefinition.java | 134 ++++++++++++++++++ .../MetadataIndexTemplateService.java | 111 +-------------- 2 files changed, 138 insertions(+), 107 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java new file mode 100644 index 0000000000000..f1fc107df5f62 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.RoutingFieldMapper; + +import java.io.IOException; + +/** + * A utility class that contains the mappings and settings logic for failure store indices that are a part of data streams. + */ +public class DataStreamFailureStoreDefinition { + + public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; + + static { + try { + /* + * The data stream failure store mapping. 
The JSON content is as follows: + * { + * "_doc": { + * "dynamic": false, + * "_routing": { + * "required": false + * }, + * "properties": { + * "@timestamp": { + * "type": "date", + * "ignore_malformed": false + * }, + * "document": { + * "properties": { + * "id": { + * "type": "keyword" + * }, + * "routing": { + * "type": "keyword" + * }, + * "index": { + * "type": "keyword" + * } + * } + * }, + * "error": { + * "properties": { + * "message": { + * "type": "wildcard" + * }, + * "stack_trace": { + * "type": "text" + * }, + * "type": { + * "type": "keyword" + * }, + * "pipeline": { + * "type": "keyword" + * }, + * "pipeline_trace": { + * "type": "keyword" + * }, + * "processor": { + * "type": "keyword" + * } + * } + * } + * } + * } + * } + */ + DATA_STREAM_FAILURE_STORE_MAPPING = new CompressedXContent( + (builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) + .field("dynamic", false) + .startObject(RoutingFieldMapper.NAME) + .field("required", false) + .endObject() + .startObject("properties") + .startObject(MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD) + .field("type", DateFieldMapper.CONTENT_TYPE) + .field("ignore_malformed", false) + .endObject() + .startObject("document") + .startObject("properties") + // document.source is unmapped so that it can be persisted in source only without worrying that the document might cause + // a mapping error + .startObject("id") + .field("type", "keyword") + .endObject() + .startObject("routing") + .field("type", "keyword") + .endObject() + .startObject("index") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .startObject("error") + .startObject("properties") + .startObject("message") + .field("type", "wildcard") + .endObject() + .startObject("stack_trace") + .field("type", "text") + .endObject() + .startObject("type") + .field("type", "keyword") + .endObject() + .startObject("pipeline") + .field("type", "keyword") + .endObject() + .startObject("pipeline_trace") + .field("type", "keyword") + .endObject() + .startObject("processor") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 1e2e15a6300c7..0daa12b7ed71f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -92,8 +92,6 @@ public class MetadataIndexTemplateService { private static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITH_ROUTING; - private static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; - static { final Map> defaultTimestampField = Map.of( DEFAULT_TIMESTAMP_FIELD, @@ -122,110 +120,6 @@ public class MetadataIndexTemplateService { .map(defaultTimestampField) .endObject() ); - /* - * The data stream failure store mapping. 
The JSON content is as follows: - * { - * "_doc": { - * "dynamic": false, - * "_routing": { - * "required": false - * }, - * "properties": { - * "@timestamp": { - * "type": "date", - * "ignore_malformed": false - * }, - * "document": { - * "properties": { - * "id": { - * "type": "keyword" - * }, - * "routing": { - * "type": "keyword" - * }, - * "index": { - * "type": "keyword" - * } - * } - * }, - * "error": { - * "properties": { - * "message": { - * "type": "wildcard" - * }, - * "stack_trace": { - * "type": "text" - * }, - * "type": { - * "type": "keyword" - * }, - * "pipeline": { - * "type": "keyword" - * }, - * "pipeline_trace": { - * "type": "keyword" - * }, - * "processor": { - * "type": "keyword" - * } - * } - * } - * } - * } - * } - */ - DATA_STREAM_FAILURE_STORE_MAPPING = new CompressedXContent( - (builder, params) -> builder.startObject(MapperService.SINGLE_MAPPING_NAME) - .field("dynamic", false) - .startObject(RoutingFieldMapper.NAME) - .field("required", false) - .endObject() - .startObject("properties") - .startObject(DEFAULT_TIMESTAMP_FIELD) - .field("type", DateFieldMapper.CONTENT_TYPE) - .field("ignore_malformed", false) - .endObject() - .startObject("document") - .startObject("properties") - // document.source is unmapped so that it can be persisted in source only without worrying that the document might cause - // a mapping error - .startObject("id") - .field("type", "keyword") - .endObject() - .startObject("routing") - .field("type", "keyword") - .endObject() - .startObject("index") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .startObject("error") - .startObject("properties") - .startObject("message") - .field("type", "wildcard") - .endObject() - .startObject("stack_trace") - .field("type", "text") - .endObject() - .startObject("type") - .field("type", "keyword") - .endObject() - .startObject("pipeline") - .field("type", "keyword") - .endObject() - .startObject("pipeline_trace") - .field("type", "keyword") - .endObject() - .startObject("processor") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); - } catch (IOException e) { throw new AssertionError(e); } @@ -1446,7 +1340,10 @@ public static List collectMappings( Objects.requireNonNull(template, "Composable index template must be provided"); // Check if this is a failure store index, and if it is, discard any template mappings. Failure store mappings are predefined. 
if (template.getDataStreamTemplate() != null && indexName.startsWith(DataStream.FAILURE_STORE_PREFIX)) { - return List.of(DATA_STREAM_FAILURE_STORE_MAPPING, ComposableIndexTemplate.DataStreamTemplate.DATA_STREAM_MAPPING_SNIPPET); + return List.of( + DataStreamFailureStoreDefinition.DATA_STREAM_FAILURE_STORE_MAPPING, + ComposableIndexTemplate.DataStreamTemplate.DATA_STREAM_MAPPING_SNIPPET + ); } List mappings = template.composedOf() .stream() From d88836bb04f9c3e221b7d8b4246a45220a4b61a0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 3 Apr 2024 17:19:23 +0100 Subject: [PATCH 006/173] [ML] Add missing TokenizationConfigUpdate named writable to registry (#107056) --- .../MlInferenceNamedXContentProvider.java | 4 ++++ .../action/InferModelActionRequestTests.java | 22 +++++++++++++------ ...erTrainedModelDeploymentRequestsTests.java | 11 ++-------- .../TokenizationConfigUpdateTests.java | 11 +++++++--- 4 files changed, 29 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index 9bcc443f6d7b0..a3fb956c3252d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -74,6 +74,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelLocation; @@ -759,6 +760,9 @@ public List getNamedWriteables() { namedWriteables.add( new NamedWriteableRegistry.Entry(InferenceConfigUpdate.class, TextSimilarityConfigUpdate.NAME, TextSimilarityConfigUpdate::new) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(InferenceConfigUpdate.class, TokenizationConfigUpdate.NAME, TokenizationConfigUpdate::new) + ); // Location namedWriteables.add( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index 2934d1dc9c42f..983e5d43a946d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -35,6 +35,9 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdateTests; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdateTests; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdateTests; 
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdateTests; @@ -132,17 +135,20 @@ protected Request mutateInstance(Request instance) { public static InferenceConfigUpdate randomInferenceConfigUpdate() { return randomFrom( - RegressionConfigUpdateTests.randomRegressionConfigUpdate(), ClassificationConfigUpdateTests.randomClassificationConfigUpdate(), - ResultsFieldUpdateTests.randomUpdate(), - TextClassificationConfigUpdateTests.randomUpdate(), - TextEmbeddingConfigUpdateTests.randomUpdate(), - NerConfigUpdateTests.randomUpdate(), + EmptyConfigUpdateTests.testInstance(), FillMaskConfigUpdateTests.randomUpdate(), - ZeroShotClassificationConfigUpdateTests.randomUpdate(), + NerConfigUpdateTests.randomUpdate(), PassThroughConfigUpdateTests.randomUpdate(), QuestionAnsweringConfigUpdateTests.randomUpdate(), - EmptyConfigUpdateTests.testInstance() + RegressionConfigUpdateTests.randomRegressionConfigUpdate(), + ResultsFieldUpdateTests.randomUpdate(), + TextClassificationConfigUpdateTests.randomUpdate(), + TextEmbeddingConfigUpdateTests.randomUpdate(), + TextExpansionConfigUpdateTests.randomUpdate(), + TextSimilarityConfigUpdateTests.randomUpdate(), + TokenizationConfigUpdateTests.randomUpdate(), + ZeroShotClassificationConfigUpdateTests.randomUpdate() ); } @@ -165,6 +171,8 @@ public static InferenceConfigUpdate mutateInferenceConfigUpdate(InferenceConfigU adjustedUpdate = QuestionAnsweringConfigUpdateTests.mutateForVersion(update, version); } else if (nlpConfigUpdate instanceof TextExpansionConfigUpdate update) { adjustedUpdate = TextExpansionConfigUpdateTests.mutateForVersion(update, version); + } else if (nlpConfigUpdate instanceof TextSimilarityConfigUpdate update) { + adjustedUpdate = TextSimilarityConfigUpdateTests.mutateForVersion(update, version); } else { throw new IllegalArgumentException("Unknown update [" + currentUpdate.getName() + "]"); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java index e130951da662f..81cad93182ba7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentRequestsTests.java @@ -14,9 +14,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdateTests; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ZeroShotClassificationConfigUpdateTests; import java.util.ArrayList; import java.util.Arrays; @@ -25,10 +22,6 @@ public class InferTrainedModelDeploymentRequestsTests extends AbstractWireSerializingTestCase { - private static InferenceConfigUpdate randomInferenceConfigUpdate() { - return randomFrom(ZeroShotClassificationConfigUpdateTests.createRandom(), EmptyConfigUpdateTests.testInstance()); - } - @Override protected Writeable.Reader instanceReader() { return InferTrainedModelDeploymentAction.Request::new; @@ -42,7 
+35,7 @@ protected InferTrainedModelDeploymentAction.Request createTestInstance() { if (createQueryStringRequest) { request = InferTrainedModelDeploymentAction.Request.forTextInput( randomAlphaOfLength(4), - randomBoolean() ? null : randomInferenceConfigUpdate(), + randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(), Arrays.asList(generateRandomStringArray(4, 7, false)), randomBoolean() ? null : TimeValue.parseTimeValue(randomTimeValue(), "timeout") ); @@ -54,7 +47,7 @@ protected InferTrainedModelDeploymentAction.Request createTestInstance() { request = InferTrainedModelDeploymentAction.Request.forDocs( randomAlphaOfLength(4), - randomBoolean() ? null : randomInferenceConfigUpdate(), + randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(), docs, randomBoolean() ? null : TimeValue.parseTimeValue(randomTimeValue(), "timeout") ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java index 90b5c60a01b62..4e2dce16aac6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java @@ -13,6 +13,13 @@ import java.io.IOException; public class TokenizationConfigUpdateTests extends AbstractWireSerializingTestCase { + + public static TokenizationConfigUpdate randomUpdate() { + Integer maxSequenceLength = randomBoolean() ? null : randomIntBetween(32, 64); + int span = randomIntBetween(8, 16); + return new TokenizationConfigUpdate(maxSequenceLength, span); + } + @Override protected Writeable.Reader instanceReader() { return TokenizationConfigUpdate::new; @@ -20,9 +27,7 @@ protected Writeable.Reader instanceReader() { @Override protected TokenizationConfigUpdate createTestInstance() { - Integer maxSequenceLength = randomBoolean() ? 
null : randomIntBetween(32, 64); - int span = randomIntBetween(8, 16); - return new TokenizationConfigUpdate(maxSequenceLength, span); + return randomUpdate(); } @Override From 42df8fef50eb3c5895fe09aad9b8be7e14332076 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 3 Apr 2024 19:02:24 +0200 Subject: [PATCH 007/173] [Inference API] Make completion task available in inference processor (#107060) --- .../results/ChatCompletionResults.java | 33 +++++++++++++++---- .../results/ChatCompletionResultsTests.java | 14 ++++++++ 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java index 50ca46d85190f..bbd4d026f0d55 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -7,14 +7,12 @@ package org.elasticsearch.xpack.core.inference.results; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -69,7 +67,7 @@ public String getWriteableName() { @Override public List transformToCoordinationFormat() { - throw new UnsupportedOperationException(); + return results; } @Override @@ -89,7 +87,7 @@ public Map asMap() { return map; } - public record Result(String content) implements Writeable, ToXContentObject { + public record Result(String content) implements InferenceResults, Writeable { public static final String RESULT = "result"; @@ -112,13 +110,34 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public String toString() { - return Strings.toString(this); + public String getResultsField() { + return RESULT; } + @Override public Map asMap() { - return Map.of(RESULT, content); + Map map = new LinkedHashMap<>(); + map.put(RESULT, content); + return map; } + + @Override + public Map asMap(String outputField) { + Map map = new LinkedHashMap<>(); + map.put(outputField, content); + return map; + } + + @Override + public Object predictedValue() { + return content; + } + + @Override + public String getWriteableName() { + return NAME; + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java index 444f6792abe63..6bbe6eea5394f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java @@ -76,6 +76,20 @@ public void testToXContent_CreatesTheRightFormatForMultipleCompletionResults() { }""")); } + public void testTransformToCoordinationFormat() { + String resultOneContent = "content 1"; + String resultTwoContent = "content 2"; + + var entity = new 
ChatCompletionResults( + List.of(new ChatCompletionResults.Result(resultOneContent), new ChatCompletionResults.Result(resultTwoContent)) + ); + + var transformedEntity = entity.transformToCoordinationFormat(); + + assertThat(transformedEntity.get(0).asMap(), is(Map.of(ChatCompletionResults.Result.RESULT, resultOneContent))); + assertThat(transformedEntity.get(1).asMap(), is(Map.of(ChatCompletionResults.Result.RESULT, resultTwoContent))); + } + @Override protected Writeable.Reader instanceReader() { return ChatCompletionResults::new; From 8cab439a8a3396c486fda095aea98d01da2baa31 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 3 Apr 2024 11:15:48 -0600 Subject: [PATCH 008/173] Mark enrich stats API as internal instead of public (#107022) This was previously publically available, however it exposes node counts and IDs. After discussion we've decided it's unnecessary on Serverless. Co-authored-by: Elastic Machine --- .../elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java index 9e3848e878ad2..e666319b563ea 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/rest/RestEnrichStatsAction.java @@ -19,7 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; -@ServerlessScope(Scope.PUBLIC) +@ServerlessScope(Scope.INTERNAL) public class RestEnrichStatsAction extends BaseRestHandler { @Override From 89563c238a2d4124b110044d5abd5daee191a741 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 3 Apr 2024 20:27:13 +0300 Subject: [PATCH 009/173] Fix auto sharding recommending 0 shards for 0.0 workload (#107064) --- .../autosharding/DataStreamAutoShardingService.java | 7 +++++-- .../autosharding/DataStreamAutoShardingServiceTests.java | 6 ++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index a045c73cc83a1..a26be73cc169d 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -338,8 +338,11 @@ private AutoShardingResult getDecreaseShardsResult( // Visible for testing static long computeOptimalNumberOfShards(int minNumberWriteThreads, int maxNumberWriteThreads, double indexingLoad) { return Math.max( - Math.min(roundUp(indexingLoad / (minNumberWriteThreads / 2.0)), 3), - roundUp(indexingLoad / (maxNumberWriteThreads / 2.0)) + Math.max( + Math.min(roundUp(indexingLoad / (minNumberWriteThreads / 2.0)), 3), + roundUp(indexingLoad / (maxNumberWriteThreads / 2.0)) + ), + 1 // we don't want to go lower than 1 shard ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 7f50ebca36fc5..41a5d0b70ea10 100644 --- 
a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -514,6 +514,12 @@ public void testCalculateDecreaseShardingRecommendations() { public void testComputeOptimalNumberOfShards() { int minWriteThreads = 2; int maxWriteThreads = 32; + + { + // 0.0 indexing load recommends 1 shard + logger.info("-> indexingLoad {}", 0.0); + assertThat(DataStreamAutoShardingService.computeOptimalNumberOfShards(minWriteThreads, maxWriteThreads, 0.0), is(1L)); + } { // the small values will be very common so let's randomise to make sure we never go below 1L double indexingLoad = randomDoubleBetween(0.0001, 1.0, true); From f49ead7446e722c5cf6d6268e3a043dd1f818fce Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:48:22 -0400 Subject: [PATCH 010/173] [ML] Add Cohere rerank to _inference service (#106378) --- docs/changelog/106378.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 23 ++- .../org/elasticsearch/inference/TaskType.java | 1 + .../org/elasticsearch/test/ESTestCase.java | 4 + .../inference/action/InferenceAction.java | 50 ++++- .../inference/results/RankedDocsResults.java | 133 ++++++++++++ .../action/InferenceActionRequestTests.java | 48 ++++- .../inference/InferenceBaseRestTest.java | 16 +- .../xpack/inference/RerankingIT.java | 119 +++++++++++ .../TestDenseInferenceServiceExtension.java | 3 + .../TestSparseInferenceServiceExtension.java | 3 + .../action/TransportInferenceAction.java | 1 + .../TransportPutInferenceModelAction.java | 7 +- .../external/action/ExecutableAction.java | 5 +- .../action/cohere/CohereActionCreator.java | 8 + .../action/cohere/CohereActionVisitor.java | 3 + .../action/cohere/CohereEmbeddingsAction.java | 6 +- .../action/cohere/CohereRerankAction.java | 54 +++++ .../action/huggingface/HuggingFaceAction.java | 6 +- .../openai/OpenAiChatCompletionAction.java | 16 +- .../action/openai/OpenAiEmbeddingsAction.java | 6 +- .../external/http/RequestExecutor.java | 4 +- ...ereEmbeddingsExecutableRequestCreator.java | 1 + .../CohereRerankExecutableRequestCreator.java | 56 +++++ .../http/sender/DocumentsOnlyInput.java | 25 +++ .../http/sender/ExecutableRequestCreator.java | 2 + .../http/sender/HttpRequestSender.java | 30 +-- .../HuggingFaceExecutableRequestCreator.java | 1 + .../external/http/sender/InferenceInputs.java | 10 + .../http/sender/InferenceRequest.java | 5 + .../external/http/sender/NoopTask.java | 5 + ...nAiCompletionExecutableRequestCreator.java | 2 + ...nAiEmbeddingsExecutableRequestCreator.java | 1 + .../http/sender/QueryAndDocsInputs.java | 33 +++ .../http/sender/RequestExecutorService.java | 10 +- .../external/http/sender/RequestTask.java | 19 +- .../external/http/sender/Sender.java | 5 +- .../http/sender/SingleRequestManager.java | 1 + .../cohere/CohereEmbeddingsRequestEntity.java | 2 - .../request/cohere/CohereRerankRequest.java | 96 +++++++++ .../cohere/CohereRerankRequestEntity.java | 60 ++++++ .../external/request/cohere/CohereUtils.java | 1 + .../CohereEmbeddingsResponseEntity.java | 2 +- .../cohere/CohereRankedResponseEntity.java | 151 ++++++++++++++ .../inference/services/SenderService.java | 32 ++- .../inference/services/ServiceUtils.java | 87 ++++++-- .../services/cohere/CohereService.java | 28 ++- .../cohere/rerank/CohereRerankModel.java | 94 +++++++++ 
.../rerank/CohereRerankServiceSettings.java | 95 +++++++++ .../rerank/CohereRerankTaskSettings.java | 184 +++++++++++++++++ .../CustomElandInternalServiceSettings.java | 3 +- .../ElasticsearchInternalService.java | 15 ++ .../services/elser/ElserInternalService.java | 13 ++ .../huggingface/HuggingFaceBaseService.java | 17 +- .../services/openai/OpenAiService.java | 16 +- .../cohere/CohereActionCreatorTests.java | 3 +- .../cohere/CohereEmbeddingsActionTests.java | 15 +- .../HuggingFaceActionCreatorTests.java | 13 +- .../huggingface/HuggingFaceActionTests.java | 7 +- .../openai/OpenAiActionCreatorTests.java | 23 ++- .../OpenAiChatCompletionActionTests.java | 15 +- .../openai/OpenAiEmbeddingsActionTests.java | 13 +- .../sender/ExecutableRequestCreatorTests.java | 6 +- .../http/sender/HttpRequestSenderTests.java | 18 +- .../sender/RequestExecutorServiceTests.java | 59 ++++-- .../http/sender/RequestTaskTests.java | 10 +- .../CohereRankedResponseEntityTests.java | 191 ++++++++++++++++++ .../services/SenderServiceTests.java | 14 ++ .../inference/services/ServiceUtilsTests.java | 16 +- .../services/cohere/CohereServiceTests.java | 11 +- .../ElasticsearchInternalServiceTests.java | 1 + .../elser/ElserInternalServiceTests.java | 1 + .../HuggingFaceBaseServiceTests.java | 2 +- .../huggingface/HuggingFaceServiceTests.java | 4 +- .../services/openai/OpenAiServiceTests.java | 6 +- .../TransportCoordinatedInferenceAction.java | 9 +- 77 files changed, 1876 insertions(+), 185 deletions(-) create mode 100644 docs/changelog/106378.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java diff --git 
a/docs/changelog/106378.yaml b/docs/changelog/106378.yaml new file mode 100644 index 0000000000000..b54760553d184 --- /dev/null +++ b/docs/changelog/106378.yaml @@ -0,0 +1,5 @@ +pr: 106378 +summary: Add Cohere rerank to `_inference` service +area: Machine Learning +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5614d9c1dba12..57a3afe083707 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -162,6 +162,7 @@ static TransportVersion def(int id) { public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); + public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index ccf405e1074e6..8fec9a64bd275 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -11,6 +11,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.Nullable; import java.io.Closeable; import java.util.List; @@ -79,14 +80,16 @@ void parseRequestConfig( /** * Perform inference on the model. * - * @param model The model - * @param input Inference input + * @param model The model + * @param query + * @param input Inference input * @param taskSettings Settings in the request to override the model's defaults - * @param inputType For search, ingest etc - * @param listener Inference result listener + * @param inputType For search, ingest etc + * @param listener Inference result listener */ void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -98,15 +101,17 @@ void infer( * model defaults if {@code chunkingOptions} contains unset * values. 
* - * @param model The model - * @param input Inference input - * @param taskSettings Settings in the request to override the model's defaults - * @param inputType For search, ingest etc + * @param model The model + * @param query + * @param input Inference input + * @param taskSettings Settings in the request to override the model's defaults + * @param inputType For search, ingest etc * @param chunkingOptions The window and span options to apply - * @param listener Chunked Inference result listener + * @param listener Chunked Inference result listener */ void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 1e301ad796e90..206cbf074af22 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -21,6 +21,7 @@ public enum TaskType implements Writeable { TEXT_EMBEDDING, SPARSE_EMBEDDING, + RERANK, COMPLETION, ANY { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index f1db2946aa572..f2b4030983db4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1047,6 +1047,10 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static String randomNullOrAlphaOfLength(int codeUnits) { + return randomBoolean() ? null : randomAlphaOfLength(codeUnits); + } + /** * Creates a valid random identifier such as node id or index name */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 4f858a88faa18..22760e6c1f73d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -50,12 +50,14 @@ public static class Request extends ActionRequest { public static final ParseField INPUT = new ParseField("input"); public static final ParseField TASK_SETTINGS = new ParseField("task_settings"); + public static final ParseField QUERY = new ParseField("query"); static final ObjectParser PARSER = new ObjectParser<>(NAME, Request.Builder::new); static { // TODO timeout PARSER.declareStringArray(Request.Builder::setInput, INPUT); PARSER.declareObject(Request.Builder::setTaskSettings, (p, c) -> p.mapOrdered(), TASK_SETTINGS); + PARSER.declareString(Request.Builder::setQuery, QUERY); } private static final EnumSet validEnumsBeforeUnspecifiedAdded = EnumSet.of(InputType.INGEST, InputType.SEARCH); @@ -64,7 +66,7 @@ public static class Request extends ActionRequest { InputType.UNSPECIFIED ); - public static Request parseRequest(String inferenceEntityId, TaskType taskType, XContentParser parser) { + public static Request parseRequest(String inferenceEntityId, TaskType taskType, XContentParser parser) throws IOException { Request.Builder builder = PARSER.apply(parser, null); builder.setInferenceEntityId(inferenceEntityId); builder.setTaskType(taskType); @@ -75,6 +77,7 @@ public static Request parseRequest(String inferenceEntityId, TaskType 
taskType, private final TaskType taskType; private final String inferenceEntityId; + private final String query; private final List input; private final Map taskSettings; private final InputType inputType; @@ -82,12 +85,14 @@ public static Request parseRequest(String inferenceEntityId, TaskType taskType, public Request( TaskType taskType, String inferenceEntityId, + String query, List input, Map taskSettings, InputType inputType ) { this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; + this.query = query; this.input = input; this.taskSettings = taskSettings; this.inputType = inputType; @@ -108,6 +113,12 @@ public Request(StreamInput in) throws IOException { } else { this.inputType = InputType.UNSPECIFIED; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + this.query = in.readOptionalString(); + } else { + this.query = null; + } } public TaskType getTaskType() { @@ -122,6 +133,10 @@ public List getInput() { return input; } + public String getQuery() { + return query; + } + public Map getTaskSettings() { return taskSettings; } @@ -161,6 +176,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { out.writeEnum(getInputTypeToWrite(inputType, out.getTransportVersion())); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + out.writeOptionalString(query); + } } // default for easier testing @@ -185,12 +204,13 @@ public boolean equals(Object o) { && Objects.equals(inferenceEntityId, request.inferenceEntityId) && Objects.equals(input, request.input) && Objects.equals(taskSettings, request.taskSettings) - && Objects.equals(inputType, request.inputType); + && Objects.equals(inputType, request.inputType) + && Objects.equals(query, request.query); } @Override public int hashCode() { - return Objects.hash(taskType, inferenceEntityId, input, taskSettings, inputType); + return Objects.hash(taskType, inferenceEntityId, input, taskSettings, inputType, query); } public static class Builder { @@ -200,6 +220,7 @@ public static class Builder { private List input; private InputType inputType = InputType.UNSPECIFIED; private Map taskSettings = Map.of(); + private String query; private Builder() {} @@ -218,6 +239,11 @@ public Builder setInput(List input) { return this; } + public Builder setQuery(String query) { + this.query = query; + return this; + } + public Builder setInputType(InputType inputType) { this.inputType = inputType; return this; @@ -229,9 +255,25 @@ public Builder setTaskSettings(Map taskSettings) { } public Request build() { - return new Request(taskType, inferenceEntityId, input, taskSettings, inputType); + return new Request(taskType, inferenceEntityId, query, input, taskSettings, inputType); } } + + public String toString() { + return "InferenceAction.Request(taskType=" + + this.getTaskType() + + ", inferenceEntityId=" + + this.getInferenceEntityId() + + ", query=" + + this.getQuery() + + ", input=" + + this.getInput() + + ", taskSettings=" + + this.getTaskSettings() + + ", inputType=" + + this.getInputType() + + ")"; + } } public static class Response extends ActionResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java new file mode 100644 index 0000000000000..ae96fa6a332bd --- 
/dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class RankedDocsResults implements InferenceServiceResults { + public static final String NAME = "rerank_service_results"; + public static final String RERANK = TaskType.RERANK.toString(); + + List rankedDocs; + + public RankedDocsResults(List rankedDocs) { + this.rankedDocs = rankedDocs; + } + + /** + * A record representing a document that has been ranked by the cohere rerank API + * @param index the index of the document when it was passed to the cohere rerank API + * @param relevanceScore + * @param text + */ + public record RankedDoc(String index, String relevanceScore, String text) implements Writeable, ToXContentObject { + + public static final String NAME = "ranked_doc"; + public static final String INDEX = "index"; + public static final String RELEVANCE_SCORE = "relevance_score"; + public static final String TEXT = "text"; + + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(INDEX, index); + builder.field(RELEVANCE_SCORE, relevanceScore); + builder.field(TEXT, text); + + builder.endObject(); + + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(index); + out.writeString(relevanceScore); + out.writeString(text); + } + + public Map asMap() { + return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore, TEXT, text)); + } + + public String toString() { + return "RankedDoc{" + + "index='" + + index + + '\'' + + ", relevanceScore='" + + relevanceScore + + '\'' + + ", text='" + + text + + '\'' + + ", hashcode=" + + hashCode() + + '}'; + } + }; + + public RankedDocsResults() { + this.rankedDocs = new ArrayList(0); + } + + public List getRankedDocs() { + return this.rankedDocs; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(RERANK); + for (RankedDoc rankedDoc : rankedDocs) { + rankedDoc.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(rankedDocs); + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Coordination format not supported by " + NAME); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("Legacy format not supported by " + NAME); + } 
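+
+    // Note the shape difference between the two map-like views (values here are illustrative):
+    // toXContent above renders each ranked doc inline, e.g.
+    //   { "rerank": [ { "index": "0", "relevance_score": "0.99", "text": "..." }, ... ] }
+    // while asMap below nests each entry under its writeable name, e.g.
+    //   { "rerank": [ { "ranked_doc": { "index": "0", ... } }, ... ] }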
+ + @Override + public Map asMap() { + Map map = new LinkedHashMap<>(); + map.put(RERANK, rankedDocs.stream().map(RankedDoc::asMap).collect(Collectors.toList())); + return map; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index 426eebd6340f1..73312974a6323 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -38,6 +38,8 @@ protected InferenceAction.Request createTestInstance() { return new InferenceAction.Request( randomFrom(TaskType.values()), randomAlphaOfLength(6), + // null, + randomNullOrAlphaOfLength(10), randomList(1, 5, () -> randomAlphaOfLength(8)), randomMap(0, 3, () -> new Tuple<>(randomAlphaOfLength(4), randomAlphaOfLength(4))), randomFrom(InputType.values()) @@ -80,13 +82,14 @@ public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { @Override protected InferenceAction.Request mutateInstance(InferenceAction.Request instance) throws IOException { - int select = randomIntBetween(0, 4); + int select = randomIntBetween(0, 5); return switch (select) { case 0 -> { var nextTask = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; yield new InferenceAction.Request( nextTask, instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), instance.getInputType() @@ -95,6 +98,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc case 1 -> new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId() + "foo", + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), instance.getInputType() @@ -105,6 +109,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), changedInputs, instance.getTaskSettings(), instance.getInputType() @@ -121,6 +126,7 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), taskSettings, instance.getInputType() @@ -131,11 +137,22 @@ protected InferenceAction.Request mutateInstance(InferenceAction.Request instanc yield new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + instance.getQuery(), instance.getInput(), instance.getTaskSettings(), nextInputType ); } + case 5 -> { + yield new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + instance.getQuery() == null ? 
randomAlphaOfLength(10) : instance.getQuery() + randomAlphaOfLength(1), + instance.getInput(), + instance.getTaskSettings(), + instance.getInputType() + ); + } default -> throw new UnsupportedOperationException(); }; } @@ -146,6 +163,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput().subList(0, 1), instance.getTaskSettings(), InputType.UNSPECIFIED @@ -154,6 +172,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.UNSPECIFIED @@ -165,6 +184,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.INGEST @@ -174,10 +194,20 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), + null, instance.getInput(), instance.getTaskSettings(), InputType.UNSPECIFIED ); + } else if (version.before(TransportVersions.ML_INFERENCE_COHERE_RERANK)) { + return new InferenceAction.Request( + instance.getTaskType(), + instance.getInferenceEntityId(), + null, + instance.getInput(), + instance.getTaskSettings(), + instance.getInputType() + ); } return instance; @@ -185,20 +215,20 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOException { assertBwcSerialization( - new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED), TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED ); } public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { assertBwcSerialization( - new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED), + new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED), TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED ); } public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.UNSPECIFIED); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.UNSPECIFIED); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -212,7 +242,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClustering_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLUSTERING); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLUSTERING); InferenceAction.Request 
deserializedInstance = copyWriteable( instance, @@ -226,7 +256,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClassification_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLASSIFICATION); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLASSIFICATION); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -242,7 +272,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd void testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClassification_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLASSIFICATION); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLASSIFICATION); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -258,7 +288,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd void testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClustering_ManualCheck() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.CLUSTERING); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.CLUSTERING); InferenceAction.Request deserializedInstance = copyWriteable( instance, @@ -271,7 +301,7 @@ public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdd } public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { - var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", List.of(), Map.of(), InputType.INGEST); + var instance = new InferenceAction.Request(TaskType.TEXT_EMBEDDING, "model", null, List.of(), Map.of(), InputType.INGEST); InferenceAction.Request deserializedInstance = copyWriteable( instance, diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index a9096f9059c5b..ae4a770fe7dd2 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -121,7 +121,7 @@ protected void deleteModel(String modelId, TaskType taskType) throws IOException protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return putModelInternal(endpoint, modelConfig); + return putRequest(endpoint, modelConfig); } /** @@ -129,12 +129,20 @@ protected Map putModel(String modelId, String modelConfig, TaskT */ protected Map putModel(String modelId, String modelConfig) throws IOException { String endpoint = Strings.format("_inference/%s", modelId); - return 
putModelInternal(endpoint, modelConfig); + return putRequest(endpoint, modelConfig); } - private Map putModelInternal(String endpoint, String modelConfig) throws IOException { + Map putRequest(String endpoint, String body) throws IOException { var request = new Request("PUT", endpoint); - request.setJsonEntity(modelConfig); + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + Map postRequest(String endpoint, String body) throws IOException { + var request = new Request("POST", endpoint); + request.setJsonEntity(body); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java new file mode 100644 index 0000000000000..77251ada4c488 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/RerankingIT.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference; + +import org.apache.lucene.tests.util.LuceneTestCase; + +import java.io.IOException; + +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106967") +public class RerankingIT extends InferenceBaseRestTest { + + public void testPutCohereRerankEndpoint() throws IOException { + String endpoint = putCohereRerankEndpoint(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String putCohereRerankEndpoint() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + } + } + """);// TODO remove key + return endpointID; + } + + public void testPutCohereRerankEndpointWithDocuments() throws IOException { + String endpoint = putCohereRerankEndpointWithDocuments(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String putCohereRerankEndpointWithDocuments() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + }, + "task_settings": { + "return_documents": true + } + } + """);// TODO remove key + return endpointID; + } + + public void testPutCohereRerankEndpointWithTop2() throws IOException { + String endpoint = putCohereRerankEndpointWithTop2(); + postCohereRerankEndpoint( + endpoint, + "what is elasticsearch for?", + new String[] { "for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis" } + ); + } + + private String 
putCohereRerankEndpointWithTop2() throws IOException { + String endpointID = randomAlphaOfLength(10).toLowerCase(); + putRequest("/_inference/rerank/" + endpointID, """ + { + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v2.0", + "api_key": "" + }, + "task_settings": { + "top_n": 2 + } + } + """);// TODO remove key + return endpointID; + } + + public void postCohereRerankEndpoint(String endpoint, String query, String[] input) throws IOException { + StringBuilder body = new StringBuilder(); + + // Start the JSON object + body.append("{"); + + // Add the query to the JSON object + body.append("\"query\":\"").append(query).append("\","); + + // Start the input array + body.append("\"input\":["); + + // Add each element of the input array to the JSON array + for (int i = 0; i < input.length; i++) { + body.append("\"").append(input[i]).append("\""); + if (i < input.length - 1) { + body.append(","); + } + } + + // End the input array and the JSON object + body.append("]}"); + postRequest("/_inference/rerank/" + endpoint, body.toString()); + } + +}
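For reference, the helper above assembles exactly this request body for the sample query and inputs (whitespace added here for readability; the strings are not JSON-escaped, which is fine for these fixed test inputs):

{
  "query": "what is elasticsearch for?",
  "input": ["for search", "for security", "for logs", "for email", "for rubber bands", "for kiwis"]
}

diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 54fe6e01946b4..c53ed82b9fe50 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceExtension; @@ -73,6 +74,7 @@ public void parseRequestConfig( @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -94,6 +96,7 @@ public void infer( @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index e5020774a70f3..30977c23ef5aa 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceExtension; @@ -74,6 +75,7 @@ public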
void parseRequestConfig( @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -93,6 +95,7 @@ public void infer( @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index ece4fee1c935f..a480763f33c47 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -90,6 +90,7 @@ private void inferOnService( ) { service.infer( model, + request.getQuery(), request.getInput(), request.getTaskSettings(), request.getInputType(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 6667e314a62b8..556acfd89c9c6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -105,7 +105,12 @@ protected void masterOperation( String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE); if (serviceName == null) { - listener.onFailure(new ElasticsearchStatusException("Model configuration is missing a service", RestStatus.BAD_REQUEST)); + listener.onFailure( + new ElasticsearchStatusException( + "Model configuration is missing [" + ModelConfigurations.SERVICE + "]", + RestStatus.BAD_REQUEST + ) + ); return; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java index 9991abf71fb12..76e997f248f1a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/ExecutableAction.java @@ -9,12 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; - -import java.util.List; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; /** * Defines an inference request to a 3rd party service. The success or failure response is communicated through the provided listener. 
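 * For example, an embeddings action is executed with a DocumentsOnlyInput, while a rerank action is executed with a QueryAndDocsInputs that carries the query alongside the documents.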
*/ public interface ExecutableAction { - void execute(List input, ActionListener listener); + void execute(InferenceInputs inferenceInputs, ActionListener listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java index 91db5e691cb61..b8e1b34c11f27 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.Map; import java.util.Objects; @@ -34,4 +35,11 @@ public ExecutableAction create(CohereEmbeddingsModel model, Map return new CohereEmbeddingsAction(sender, overriddenModel); } + + @Override + public ExecutableAction create(CohereRerankModel model, Map taskSettings) { + var overriddenModel = CohereRerankModel.of(model, taskSettings); + + return new CohereRerankAction(sender, overriddenModel); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java index cc732e7ab8dc5..5431308850f36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionVisitor.java @@ -10,9 +10,12 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.Map; public interface CohereActionVisitor { ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings, InputType inputType); + + ExecutableAction create(CohereRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index a49fc85200894..712e242e80560 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -12,10 +12,10 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.CohereEmbeddingsExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import 
org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -38,13 +38,13 @@ public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model) { } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException( failedToSendRequestErrorMessage, listener ); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java new file mode 100644 index 0000000000000..7e4edf7c59103 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.cohere; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.CohereRerankExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class CohereRerankAction implements ExecutableAction { + private final String failedToSendRequestErrorMessage; + private final Sender sender; + private final CohereRerankExecutableRequestCreator requestCreator; + + public CohereRerankAction(Sender sender, CohereRerankModel model) { + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( + model.getServiceSettings().getCommonSettings().uri(), + "Cohere rerank" + ); + requestCreator = new CohereRerankExecutableRequestCreator(model); + } + + @Override + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException( + failedToSendRequestErrorMessage, + listener + ); + sender.send(requestCreator, inferenceInputs, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + 
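+            // any other failure (e.g. the request being rejected while queuing) is wrapped in an
+            // internal server error that reuses the failed-to-send message built in the constructor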
listener.onFailure(createInternalServerError(e, failedToSendRequestErrorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java index 928d396c991f8..ca228ed0e906a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -13,11 +13,11 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.sender.HuggingFaceExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.core.Strings.format; @@ -48,10 +48,10 @@ public HuggingFaceAction( } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index 31fd6a35ef26b..bba51d8e5bd23 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -13,12 +13,13 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionExecutableRequestCreator; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -41,16 +42,21 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model } @Override - public void execute(List input, ActionListener listener) { - if (input.size() > 1) { - listener.onFailure(new ElasticsearchStatusException("OpenAI completions only 
accepts 1 input", RestStatus.BAD_REQUEST)); + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput docsOnlyInput) { + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + } else { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); return; } try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java index d5f083ac8aa90..e9cd81968471d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java @@ -11,12 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.OpenAiEmbeddingsExecutableRequestCreator; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; @@ -38,11 +38,11 @@ public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, Servic } @Override - public void execute(List input, ActionListener listener) { + public void execute(InferenceInputs inferenceInputs, ActionListener listener) { try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(requestCreator, input, wrappedListener); + sender.send(requestCreator, inferenceInputs, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java index 77b4d49d62b9f..7b0287e9652f7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java @@ -12,8 +12,8 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.sender.ExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; -import java.util.List; import 
java.util.concurrent.TimeUnit; public interface RequestExecutor { @@ -29,7 +29,7 @@ public interface RequestExecutor { void execute( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java index ff4f9847da8a1..6488996d2edc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java @@ -42,6 +42,7 @@ public CohereEmbeddingsExecutableRequestCreator(CohereEmbeddingsModel model) { @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java new file mode 100644 index 0000000000000..432a5334ac001 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.cohere.CohereResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.cohere.CohereRerankRequest; +import org.elasticsearch.xpack.inference.external.response.cohere.CohereRankedResponseEntity; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class CohereRerankExecutableRequestCreator implements ExecutableRequestCreator { + private static final Logger logger = LogManager.getLogger(CohereRerankExecutableRequestCreator.class); + private static final ResponseHandler HANDLER = createCohereResponseHandler(); + + private static ResponseHandler createCohereResponseHandler() { + return new CohereResponseHandler("cohere rerank", (request, response) -> CohereRankedResponseEntity.fromResponse(response)); + } + + private final CohereAccount account; + private final CohereRerankModel model; + + public CohereRerankExecutableRequestCreator(CohereRerankModel model) { + this.model = Objects.requireNonNull(model); + account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); + } + + @Override + public Runnable create( + String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + CohereRerankRequest request = new CohereRerankRequest(account, query, input, model); + + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java new file mode 100644 index 0000000000000..a11be003585fd --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/DocumentsOnlyInput.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import java.util.List; +import java.util.Objects; + +public class DocumentsOnlyInput extends InferenceInputs { + + List input; + + public DocumentsOnlyInput(List chunks) { + super(); + this.input = Objects.requireNonNull(chunks); + } + + public List getInputs() { + return this.input; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java index 96455ca4b1cb1..dc279573d5c92 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java @@ -9,6 +9,7 @@ import org.apache.http.client.protocol.HttpClientContext; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; @@ -20,6 +21,7 @@ */ public interface ExecutableRequestCreator { Runnable create( + @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index 0131bf2989f6f..cd6658d3d70d7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -147,21 +147,22 @@ public void close() throws IOException { /** * Send a request at some point in the future. The timeout used is retrieved from the settings. - * @param requestCreator a factory for creating a request to be sent to a 3rd party service - * @param input the list of string input to send in the request - * @param timeout the maximum time the request should wait for a response before timing out. If null, the timeout is ignored. - * The queuing logic may still throw a timeout if it fails to send the request because it couldn't get a leased - * @param listener a listener to handle the response + * + * @param requestCreator a factory for creating a request to be sent to a 3rd party service + * @param inferenceInputs the list of string input to send in the request + * @param timeout the maximum time the request should wait for a response before timing out. If null, the timeout is ignored. 
+ * The queuing logic may still throw a timeout if it fails to send the request because it couldn't get a leased + * @param listener a listener to handle the response */ public void send( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); - service.execute(requestCreator, input, timeout, listener); + service.execute(requestCreator, inferenceInputs, timeout, listener); } private void waitForStartToComplete() { @@ -176,14 +177,19 @@ private void waitForStartToComplete() { /** * Send a request at some point in the future. The timeout used is retrieved from the settings. - * @param requestCreator a factory for creating a request to be sent to a 3rd party service - * @param input the list of string input to send in the request - * @param listener a listener to handle the response + * + * @param requestCreator a factory for creating a request to be sent to a 3rd party service + * @param inferenceInputs the list of string input to send in the request + * @param listener a listener to handle the response */ - public void send(ExecutableRequestCreator requestCreator, List input, ActionListener listener) { + public void send( + ExecutableRequestCreator requestCreator, + InferenceInputs inferenceInputs, + ActionListener listener + ) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); - service.execute(requestCreator, input, maxRequestTimeout, listener); + service.execute(requestCreator, inferenceInputs, maxRequestTimeout, listener); } public static List> getSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java index 62558fe6071ac..7c70f738105d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java @@ -42,6 +42,7 @@ public HuggingFaceExecutableRequestCreator(HuggingFaceModel model, ResponseHandl @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java new file mode 100644 index 0000000000000..d7e07e734ce80 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceInputs.java @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +public abstract class InferenceInputs {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java index ed77e4b207a94..5d5e8df40c22d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java @@ -23,6 +23,11 @@ public interface InferenceRequest { */ ExecutableRequestCreator getRequestCreator(); + /** + * Returns the query associated with this request. Used for Rerank tasks. + */ + String getQuery(); + /** * Returns the text input associated with this request. */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java index 6cdcd38d224a9..cca00b2e9bf58 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java @@ -20,6 +20,11 @@ public ExecutableRequestCreator getRequestCreator() { return null; } + @Override + public String getQuery() { + return null; + } + @Override public List getInput() { return null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java index 44ab670843335..853038e1a7ca4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -45,6 +46,7 @@ public OpenAiCompletionExecutableRequestCreator(OpenAiChatCompletionModel model) @Override public Runnable create( + @Nullable String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java index 708e67944441c..8f867c374e2d3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java @@ -53,6 +53,7 @@ public 
OpenAiEmbeddingsExecutableRequestCreator(OpenAiEmbeddingsModel model, Tru @Override public Runnable create( + String query, List input, RequestSender requestSender, Supplier hasRequestCompletedFunction, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java new file mode 100644 index 0000000000000..4d24598d67831 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/QueryAndDocsInputs.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import java.util.List; +import java.util.Objects; + +public class QueryAndDocsInputs extends InferenceInputs { + + String query; + + public String getQuery() { + return query; + } + + public List getChunks() { + return chunks; + } + + List chunks; + + public QueryAndDocsInputs(String query, List chunks) { + super(); + this.query = Objects.requireNonNull(query); + this.chunks = Objects.requireNonNull(chunks); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index ecbaf26ea17f4..0a5ab8f87ef1b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -258,7 +258,7 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE * Execute the request at some point in the future. * * @param requestCreator the http request to send - * @param input the text to perform inference on + * @param inferenceInputs the inputs to send in the request * @param timeout the maximum time to wait for this request to complete (failing or succeeding). Once the time elapses, the * listener::onFailure is called with a {@link org.elasticsearch.ElasticsearchTimeoutException}. 
* If null, then the request will wait forever @@ -266,13 +266,13 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE */ public void execute( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ) { var task = new RequestTask( requestCreator, - input, + inferenceInputs, timeout, threadPool, // TODO when multi-tenancy (as well as batching) is implemented we need to be very careful that we preserve @@ -280,6 +280,10 @@ public void execute( ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext()) ); + completeExecution(task); + } + + private void completeExecution(RequestTask task) { if (isShutdown()) { EsRejectedExecutionException rejected = new EsRejectedExecutionException( format("Failed to enqueue task because the http executor service [%s] has already shutdown", serviceName), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 970366f7765dd..6628b9ef425e2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -27,19 +27,29 @@ class RequestTask implements RejectableTask { private final AtomicBoolean finished = new AtomicBoolean(); private final ExecutableRequestCreator requestCreator; + private final String query; private final List input; private final ActionListener listener; RequestTask( ExecutableRequestCreator requestCreator, - List input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ThreadPool threadPool, ActionListener listener ) { this.requestCreator = Objects.requireNonNull(requestCreator); - this.input = Objects.requireNonNull(input); this.listener = getListener(Objects.requireNonNull(listener), timeout, Objects.requireNonNull(threadPool)); + + if (inferenceInputs instanceof QueryAndDocsInputs) { + this.query = ((QueryAndDocsInputs) inferenceInputs).getQuery(); + this.input = ((QueryAndDocsInputs) inferenceInputs).getChunks(); + } else if (inferenceInputs instanceof DocumentsOnlyInput) { + this.query = null; + this.input = ((DocumentsOnlyInput) inferenceInputs).getInputs(); + } else { + throw new IllegalArgumentException("Unsupported inference inputs type: " + inferenceInputs.getClass()); + } } private ActionListener getListener( @@ -85,6 +95,11 @@ public List getInput() { return input; } + @Override + public String getQuery() { + return query; + } + @Override public ActionListener getListener() { return listener; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 0272f4b0e351c..3902a154b2b99 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -13,17 +13,16 @@ import org.elasticsearch.inference.InferenceServiceResults; import java.io.Closeable; -import java.util.List; public interface Sender extends Closeable { void start(); void send( ExecutableRequestCreator requestCreator, - List 
input, + InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener ); - void send(ExecutableRequestCreator requestCreator, List input, ActionListener listener); + void send(ExecutableRequestCreator requestCreator, InferenceInputs inferenceInputs, ActionListener listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java index ecd12814d0877..494c77964080f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManager.java @@ -30,6 +30,7 @@ public void execute(InferenceRequest inferenceRequest, HttpClientContext context inferenceRequest.getRequestCreator() .create( + inferenceRequest.getQuery(), inferenceRequest.getInput(), requestSender, inferenceRequest.getRequestCompletedFunction(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 035bd44ebf405..6e389e8537d27 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -33,9 +33,7 @@ public record CohereEmbeddingsRequestEntity( private static final String SEARCH_QUERY = "search_query"; private static final String CLUSTERING = "clustering"; private static final String CLASSIFICATION = "classification"; - private static final String TEXTS_FIELD = "texts"; - static final String INPUT_TYPE_FIELD = "input_type"; static final String EMBEDDING_TYPES_FIELD = "embedding_types"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java new file mode 100644 index 0000000000000..b8f3916582bf2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; + +public class CohereRerankRequest implements Request { + + private final CohereAccount account; + private final String query; + private final List input; + private final URI uri; + private final CohereRerankTaskSettings taskSettings; + private final String model; + private final String inferenceEntityId; + + public CohereRerankRequest(CohereAccount account, String query, List input, CohereRerankModel model) { + Objects.requireNonNull(model); + + this.account = Objects.requireNonNull(account); + this.input = Objects.requireNonNull(input); + this.query = Objects.requireNonNull(query); + uri = buildUri(this.account.url(), "Cohere", CohereRerankRequest::buildDefaultUri); + taskSettings = model.getTaskSettings(); + this.model = model.getServiceSettings().getCommonSettings().modelId(); + inferenceEntityId = model.getInferenceEntityId(); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString(new CohereRerankRequestEntity(query, input, taskSettings, model)).getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); + + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + httpPost.setHeader(CohereUtils.createRequestSourceHeader()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public String getInferenceEntityId() { + return inferenceEntityId; + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + return this; // TODO? 
+ } + + @Override + public boolean[] getTruncationInfo() { + return null; + } + + // default for testing + static URI buildDefaultUri() throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(CohereUtils.HOST) + .setPathSegments(CohereUtils.VERSION_1, CohereUtils.RERANK_PATH) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java new file mode 100644 index 0000000000000..e7abe0990eb0c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequestEntity.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankTaskSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record CohereRerankRequestEntity(String model, String query, List documents, CohereRerankTaskSettings taskSettings) + implements + ToXContentObject { + + private static final String DOCUMENTS_FIELD = "documents"; + private static final String QUERY_FIELD = "query"; + private static final String MODEL_FIELD = "model"; + + public CohereRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(documents); + Objects.requireNonNull(taskSettings); + } + + public CohereRerankRequestEntity(String query, List input, CohereRerankTaskSettings taskSettings, String model) { + this(model, query, input, taskSettings); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(MODEL_FIELD, model); + builder.field(QUERY_FIELD, query); + builder.field(DOCUMENTS_FIELD, documents); + + if (taskSettings.getDoesReturnDocuments() != null) { + builder.field(CohereRerankTaskSettings.RETURN_DOCUMENTS, taskSettings.getDoesReturnDocuments()); + } + + if (taskSettings.getTopNDocumentsOnly() != null) { + builder.field(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, taskSettings.getTopNDocumentsOnly()); + } + + if (taskSettings.getMaxChunksPerDoc() != null) { + builder.field(CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, taskSettings.getMaxChunksPerDoc()); + } + + builder.endObject(); + return builder; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java index e54328df1dbf7..e6344f4d17b40 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtils.java @@ -14,6 +14,7 @@ public class CohereUtils { public static final String HOST = "api.cohere.ai"; public static final String VERSION_1 = "v1"; public static final String EMBEDDINGS_PATH = 
"embed"; + public static final String RERANK_PATH = "rerank"; public static final String REQUEST_SOURCE_HEADER = "Request-Source"; public static final String ELASTIC_REQUEST_SOURCE = "unspecified:elasticsearch"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index 9221e5c5deed8..fabd96b543594 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -50,7 +50,7 @@ private static String supportedEmbeddingTypes() { } /** - * Parses the OpenAI json response. + * Parses the Cohere embed json response. * For a request like: * *
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java
new file mode 100644
index 0000000000000..8574fb2ba520f
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ *
+ * this file was contributed to by a generative AI
+ */
+
+package org.elasticsearch.xpack.inference.external.response.cohere;
+
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
+import org.elasticsearch.xpack.inference.external.http.HttpResult;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
+import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken;
+import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField;
+
+public class CohereRankedResponseEntity {
+
+    /**
+     * Parses the Cohere ranked response.
+     *
+     * For a request like:
+     *     "model": "rerank-english-v2.0",
+     *     "query": "What is the capital of the United States?",
+     *     "return_documents": true,
+     *     "top_n": 3,
+     *     "documents": ["Carson City is the capital city of the American state of Nevada.",
+     *                   "The Commonwealth of the Northern Mariana ... Its capital is Saipan.",
+     *                   "Washington, D.C. (also known as simply Washington or D.C., ... It is a federal district.",
+     *                   "Capital punishment (the death penalty) ... As of 2017, capital punishment is legal in 30 of the 50 states."]
+     * 
+ * The response will look like (without whitespace): + * { + * "id": "1983d114-a6e8-4940-b121-eb4ac3f6f703", + * "results": [ + * { + * "document": { + * "text": "Washington, D.C. is the capital of the United States. It is a federal district." + * }, + * "index": 2, + * "relevance_score": 0.98005307 + * }, + * { + * "document": { + * "text": "Capital punishment (the death penalty) As of 2017, capital punishment is legal in 30 of the 50 states." + * }, + * "index": 3, + * "relevance_score": 0.27904198 + * }, + * { + * "document": { + * "text": "Carson City is the capital city of the American state of Nevada." + * }, + * "index": 0, + * "relevance_score": 0.10194652 + * } + * ], + * "meta": { + * "api_version": { + * "version": "1" + * }, + * "billed_units": { + * "search_units": 1 + * } + * } + * + * @param response the http response from cohere + * @return the parsed response + * @throws IOException if there is an error parsing the response + */ + public static InferenceServiceResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE); // TODO error message + + token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return new RankedDocsResults(parseList(jsonParser, CohereRankedResponseEntity::parseRankedDocObject)); + } else { + throwUnknownToken(token, jsonParser); + } + + // This should never be reached. 
The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the Cohere response"); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + Integer index = null; + Float relevanceScore = null; + String documentText = null; + parser.nextToken(); + while (parser.currentToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + switch (parser.currentName()) { + case "index": + parser.nextToken(); // move to VALUE_NUMBER + index = parser.intValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "relevance_score": + parser.nextToken(); // move to VALUE_NUMBER + relevanceScore = parser.floatValue(); + parser.nextToken(); // move to next FIELD_NAME or END_OBJECT + break; + case "document": + parser.nextToken(); // move to START_OBJECT; document text is wrapped in an object + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + do { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME && parser.currentName().equals("text")) { + parser.nextToken(); // move to VALUE_STRING + documentText = parser.text(); + } + } while (parser.nextToken() != XContentParser.Token.END_OBJECT); + parser.nextToken();// move past END_OBJECT + // parser should now be at the next FIELD_NAME or END_OBJECT + break; + default: + XContentParserUtils.throwUnknownField(parser.currentName(), parser); + } + } else { + parser.nextToken(); + } + } + return new RankedDocsResults.RankedDoc(String.valueOf(index), String.valueOf(relevanceScore), String.valueOf(documentText)); + } + + private CohereRankedResponseEntity() {} + + static String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Cohere embeddings response"; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index 98b004cd1aa7f..60fc219ba5c66 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceService; @@ -44,19 +45,36 @@ protected ServiceComponents getServiceComponents() { @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, ActionListener listener ) { init(); + if (query != null) { + doInfer(model, query, input, taskSettings, inputType, listener); + } else { + doInfer(model, input, taskSettings, inputType, listener); + } + } - doInfer(model, input, taskSettings, inputType, listener); + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + init(); + chunkedInfer(model, null, input, taskSettings, inputType, 
chunkingOptions, listener); } @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -64,7 +82,7 @@ public void chunkedInfer( ActionListener> listener ) { init(); - doChunkedInfer(model, input, taskSettings, inputType, chunkingOptions, listener); + doChunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); } protected abstract void doInfer( @@ -75,8 +93,18 @@ protected abstract void doInfer( ActionListener listener ); + protected abstract void doInfer( + Model model, + String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ); + protected abstract void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 96846f3f71142..11f8c6f53fb7b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -121,6 +121,10 @@ public static String mustBeNonEmptyString(String settingName, String scope) { return Strings.format("[%s] Invalid value empty string. [%s] must be a non-empty string", scope, settingName); } + public static String mustBeNonNonNull(String settingName, String scope) { + return Strings.format("[%s] Invalid value empty string. [%s] must be non-null", scope, settingName); + } + public static String invalidValue(String settingName, String scope, String invalidType, String[] requiredValues) { var copyOfRequiredValues = requiredValues.clone(); Arrays.sort(copyOfRequiredValues); @@ -232,6 +236,25 @@ public static String extractOptionalString( return optionalField; } + public static Integer extractOptionalPositiveInteger( + Map map, + String settingName, + String scope, + ValidationException validationException + ) { + Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class); + + if (optionalField != null && optionalField <= 0) { + validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(settingName, optionalField)); + } + + if (validationException.validationErrors().isEmpty() == false) { + return null; + } + + return optionalField; + } + public static > E extractOptionalEnum( Map map, String settingName, @@ -259,6 +282,21 @@ public static > E extractOptionalEnum( return null; } + public static Boolean extractOptionalBoolean( + Map map, + String settingName, + String scope, + ValidationException validationException + ) { + Boolean optionalField = ServiceUtils.removeAsType(map, settingName, Boolean.class); + + if (validationException.validationErrors().isEmpty() == false) { + return null; + } + + return optionalField; + } + private static > void validateEnumValue(E enumValue, EnumSet validValues) { if (validValues.contains(enumValue) == false) { throw new IllegalArgumentException(Strings.format("Enum value [%s] is not one of the acceptable values", enumValue.toString())); @@ -310,27 +348,36 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod public static void getEmbeddingSize(Model model, InferenceService service, ActionListener listener) { assert model.getTaskType() == TaskType.TEXT_EMBEDDING; - service.infer(model, 
List.of(TEST_EMBEDDING_INPUT), Map.of(), InputType.INGEST, listener.delegateFailureAndWrap((delegate, r) -> { - if (r instanceof TextEmbedding embeddingResults) { - try { - delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); - } catch (Exception e) { - delegate.onFailure(new ElasticsearchStatusException("Could not determine embedding size", RestStatus.BAD_REQUEST, e)); + service.infer( + model, + null, + List.of(TEST_EMBEDDING_INPUT), + Map.of(), + InputType.INGEST, + listener.delegateFailureAndWrap((delegate, r) -> { + if (r instanceof TextEmbedding embeddingResults) { + try { + delegate.onResponse(embeddingResults.getFirstEmbeddingSize()); + } catch (Exception e) { + delegate.onFailure( + new ElasticsearchStatusException("Could not determine embedding size", RestStatus.BAD_REQUEST, e) + ); + } + } else { + delegate.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size. " + + "Expected a result of type [" + + TextEmbeddingResults.NAME + + "] got [" + + r.getWriteableName() + + "]", + RestStatus.BAD_REQUEST + ) + ); } - } else { - delegate.onFailure( - new ElasticsearchStatusException( - "Could not determine embedding size. " - + "Expected a result of type [" - + TextEmbeddingResults.NAME - + "] got [" - + r.getWriteableName() - + "]", - RestStatus.BAD_REQUEST - ) - ); - } - })); + }) + ); } private static final String TEST_EMBEDDING_INPUT = "how big"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 28ccccecb9627..40f3bcda57a47 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -29,13 +29,16 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.QueryAndDocsInputs; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; import java.util.List; import java.util.Map; @@ -130,6 +133,7 @@ private static CohereModel createModel( secretSettings, context ); + case RERANK -> new CohereRerankModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings, context); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @@ -173,6 +177,7 @@ public CohereModel parsePersistedConfig(String inferenceEntityId, TaskType taskT @Override public void doInfer( Model model, + String query, List input, Map taskSettings, InputType inputType, @@ -187,12 
+192,33 @@ public void doInfer( var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); var action = cohereModel.accept(actionCreator, taskSettings, inputType); - action.execute(input, listener); + action.execute(new QueryAndDocsInputs(query, input), listener); + } + + @Override + public void doInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + if (model instanceof CohereModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + CohereModel cohereModel = (CohereModel) model; + var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); + + var action = cohereModel.accept(actionCreator, taskSettings, inputType); + action.execute(new DocumentsOnlyInput(input), listener); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java new file mode 100644 index 0000000000000..78e0e419c418d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.cohere.CohereModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +public class CohereRerankModel extends CohereModel { + public static CohereRerankModel of(CohereRerankModel model, Map taskSettings) { + var requestTaskSettings = CohereRerankTaskSettings.fromMap(taskSettings); + return new CohereRerankModel(model, CohereRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public CohereRerankModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets, + ConfigurationParseContext context + ) { + this( + modelId, + taskType, + service, + CohereRerankServiceSettings.fromMap(serviceSettings, context), + CohereRerankTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + // should only be used for testing + CohereRerankModel( + String modelId, + TaskType taskType, + String service, + CohereRerankServiceSettings serviceSettings, + CohereRerankTaskSettings taskSettings, + @Nullable DefaultSecretSettings secretSettings + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + } + + private 
CohereRerankModel(CohereRerankModel model, CohereRerankTaskSettings taskSettings) { + super(model, taskSettings); + } + + public CohereRerankModel(CohereRerankModel model, CohereRerankServiceSettings serviceSettings) { + super(model, serviceSettings); + } + + @Override + public CohereRerankServiceSettings getServiceSettings() { + return (CohereRerankServiceSettings) super.getServiceSettings(); + } + + @Override + public CohereRerankTaskSettings getTaskSettings() { + return (CohereRerankTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + /** + * Accepts a visitor to create an executable action. The returned action will not return documents in the response. + * @param visitor _ + * @param taskSettings _ + * @param inputType ignored for rerank task + * @return the rerank action + */ + @Override + public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings, InputType inputType) { + return visitor.create(this, taskSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java new file mode 100644 index 0000000000000..a14ffb7ef8216 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class CohereRerankServiceSettings implements ServiceSettings { + public static final String NAME = "cohere_rerank_service_settings"; + + public static CohereRerankServiceSettings fromMap(Map map, ConfigurationParseContext parseContext) { + ValidationException validationException = new ValidationException(); + var commonServiceSettings = CohereServiceSettings.fromMap(map, parseContext); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new CohereRerankServiceSettings(commonServiceSettings); + } + + private final CohereServiceSettings commonSettings; + + public CohereRerankServiceSettings(CohereServiceSettings commonSettings) { + this.commonSettings = commonSettings; + } + + public CohereRerankServiceSettings(StreamInput in) throws IOException { + commonSettings = new CohereServiceSettings(in); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + commonSettings.toXContentFragment(builder); + + builder.endObject(); + return builder; + } + + @Override + public ToXContentObject getFilteredXContentObject() { + return this; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + commonSettings.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereRerankServiceSettings that = (CohereRerankServiceSettings) o; + return Objects.equals(commonSettings, that.commonSettings); + } + + @Override + public int hashCode() { + return Objects.hash(commonSettings); + } + + public CohereServiceSettings getCommonSettings() { + return commonSettings; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java new file mode 100644 index 0000000000000..75588aa2b5036 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +/** + * Defines the task settings for the cohere rerank service. + * + *
+ * See api docs for details. + *
+ */ +public class CohereRerankTaskSettings implements TaskSettings { + + public static final String NAME = "cohere_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + public static final String TOP_N_DOCS_ONLY = "top_n"; + public static final String MAX_CHUNKS_PER_DOC = "max_chunks_per_doc"; + + static final CohereRerankTaskSettings EMPTY_SETTINGS = new CohereRerankTaskSettings(null, null, null); + + public static CohereRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return EMPTY_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, ModelConfigurations.TASK_SETTINGS, validationException); + Integer topNDocumentsOnly = extractOptionalPositiveInteger( + map, + TOP_N_DOCS_ONLY, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer maxChunksPerDoc = extractOptionalPositiveInteger( + map, + MAX_CHUNKS_PER_DOC, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return of(topNDocumentsOnly, returnDocuments, maxChunksPerDoc); + } + + /** + * Creates a new {@link CohereRerankTaskSettings} by preferring non-null fields from the request settings over the original settings. + * + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @return a constructed {@link CohereRerankTaskSettings} + */ + public static CohereRerankTaskSettings of(CohereRerankTaskSettings originalSettings, CohereRerankTaskSettings requestTaskSettings) { + return new CohereRerankTaskSettings( + requestTaskSettings.getTopNDocumentsOnly() != null + ? requestTaskSettings.getTopNDocumentsOnly() + : originalSettings.getTopNDocumentsOnly(), + requestTaskSettings.getReturnDocuments() != null + ? requestTaskSettings.getReturnDocuments() + : originalSettings.getReturnDocuments(), + requestTaskSettings.getMaxChunksPerDoc() != null + ? 
requestTaskSettings.getMaxChunksPerDoc() + : originalSettings.getMaxChunksPerDoc() + ); + } + + public static CohereRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments, Integer maxChunksPerDoc) { + return new CohereRerankTaskSettings(topNDocumentsOnly, returnDocuments, maxChunksPerDoc); + } + + private final Integer topNDocumentsOnly; + private final Boolean returnDocuments; + private final Integer maxChunksPerDoc; + + public CohereRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalInt(), in.readOptionalBoolean(), in.readOptionalInt()); + } + + public CohereRerankTaskSettings( + @Nullable Integer topNDocumentsOnly, + @Nullable Boolean doReturnDocuments, + @Nullable Integer maxChunksPerDoc + ) { + this.topNDocumentsOnly = topNDocumentsOnly; + this.returnDocuments = doReturnDocuments; + this.maxChunksPerDoc = maxChunksPerDoc; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (topNDocumentsOnly != null) { + builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly); + } + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + if (maxChunksPerDoc != null) { + builder.field(MAX_CHUNKS_PER_DOC, maxChunksPerDoc); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalInt(topNDocumentsOnly); + out.writeOptionalBoolean(returnDocuments); + out.writeOptionalInt(maxChunksPerDoc); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CohereRerankTaskSettings that = (CohereRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments) + && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly) + && Objects.equals(maxChunksPerDoc, that.maxChunksPerDoc); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments, topNDocumentsOnly, maxChunksPerDoc); + } + + public static String invalidInputTypeMessage(InputType inputType) { + return Strings.format("received invalid input type value [%s]", inputType.toString()); + } + + public Boolean getDoesReturnDocuments() { + return returnDocuments; + } + + public Integer getTopNDocumentsOnly() { + return topNDocumentsOnly; + } + + public Boolean getReturnDocuments() { + return returnDocuments; + } + + public Integer getMaxChunksPerDoc() { + return maxChunksPerDoc; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index ee22d51914b15..86ac5bbaaa272 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -46,7 +46,8 @@ public static Builder fromMap(Map map) { validateParameters(numAllocations, validationException, numThreads); - String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, "ServiceSettings", 
validationException); + String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, "ServiceSettings", validationException); // TODO check if this is + // the correct scope if (validationException.validationErrors().isEmpty() == false) { throw validationException; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index a9dc487bfca8a..3f91bcfe648e5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -14,6 +14,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -210,6 +211,7 @@ public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskTyp @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -236,9 +238,22 @@ public void infer( ); } + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + + chunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); + } + @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index bb88193612ff4..d6323635f0cec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -256,6 +256,7 @@ public void stop(String inferenceEntityId, ActionListener listener) { @Override public void infer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, @@ -283,9 +284,21 @@ public void infer( ); } + public void chunkedInfer( + Model model, + List input, + Map taskSettings, + InputType inputType, + @Nullable ChunkingOptions chunkingOptions, + ActionListener> listener + ) { + chunkedInfer(model, null, input, taskSettings, inputType, chunkingOptions, listener); + } + @Override public void chunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index 7075ff63d60a7..1225c471ec9c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.huggingface; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceResults; @@ -23,6 +24,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -122,12 +124,25 @@ public void doInfer( var actionCreator = new HuggingFaceActionCreator(getSender(), getServiceComponents()); var action = huggingFaceModel.accept(actionCreator); - action.execute(input, listener); + action.execute(new DocumentsOnlyInput(input), listener); + } + + @Override + protected void doInfer( + Model model, + String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + throw new UnsupportedOperationException("Hugging Face service does not support inference with query input"); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 1a47f00519cb6..e30d6f2eae592 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; @@ -202,12 +203,25 @@ public void doInfer( var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); var action = openAiModel.accept(actionCreator, taskSettings); - action.execute(input, listener); + action.execute(new DocumentsOnlyInput(input), listener); + } + + @Override + protected void doInfer( + Model model, + String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + throw new UnsupportedOperationException("OpenAI service does not support inference with query input"); } @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java index 66ef9910a2649..3b3b1539367ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreatorTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; @@ -113,7 +114,7 @@ public void testCreate_CohereEmbeddingsModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings, InputType.UNSPECIFIED); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java index b504744bfe5f3..b5220fbc0960e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.external.request.cohere.CohereUtils; @@ -118,7 +119,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { ); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -199,7 +200,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForInt8ResponseType() throws I ); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -257,7 +258,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException 
= expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -278,7 +279,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -302,7 +303,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -316,7 +317,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -333,7 +334,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index 6334c669d0c1f..5351d1db833e3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; @@ -90,7 +91,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOExce var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -157,7 +158,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx var action = 
actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -205,7 +206,7 @@ public void testExecute_ReturnsSuccessfulResponse_ForEmbeddingsAction() throws I var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -263,7 +264,7 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -318,7 +319,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOExc var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -376,7 +377,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("123456"), listener); + action.execute(new DocumentsOnlyInput(List.of("123456")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java index 7b332e8c6634d..0faee28d1af7d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -58,7 +59,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { var action = createAction(URL, sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -79,7 +80,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + 
action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -96,7 +97,7 @@ public void testExecute_ThrowsException() { var action = createAction(URL, sender, "inferenceEntityId"); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 9b14cf259522c..a1cc0321cb74e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.After; @@ -103,7 +104,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -156,7 +157,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -208,7 +209,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOExce var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -267,7 +268,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI embeddings request to [%s]", getUrl(webServer)))); @@ -327,7 +328,7 @@ public void testCreate_OpenAiChatCompletionModel() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new 
DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -390,7 +391,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOExceptio var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -452,7 +453,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IO var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -520,7 +521,7 @@ public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat( var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -597,7 +598,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -680,7 +681,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abcd"), listener); + action.execute(new DocumentsOnlyInput(List.of("abcd")), listener); var result = listener.actionGet(TIMEOUT); @@ -748,7 +749,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("super long input"), listener); + action.execute(new DocumentsOnlyInput(List.of("super long input")), listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index 15998469d08d0..ff8cdeab61ad3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import 
org.elasticsearch.xpack.inference.external.http.sender.Sender; @@ -112,7 +113,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -152,7 +153,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -173,7 +174,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -194,7 +195,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -208,7 +209,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -222,7 +223,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -266,7 +267,7 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc", "def"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc", "def")), listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java index c803121e6da79..6c83eaf96201a 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -104,7 +105,7 @@ public void testExecute_ReturnsSuccessfulResponse() throws IOException { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var result = listener.actionGet(TIMEOUT); @@ -140,7 +141,7 @@ public void testExecute_ThrowsElasticsearchException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -161,7 +162,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -182,7 +183,7 @@ public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -196,7 +197,7 @@ public void testExecute_ThrowsException() { var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); @@ -210,7 +211,7 @@ public void testExecute_ThrowsExceptionWithNullUrl() { var action = createAction(null, "org", "secret", "model", "user", sender); PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); + action.execute(new DocumentsOnlyInput(List.of("abc")), listener); var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java index b4e770141939b..9a85b00c01485 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java @@ -24,7 +24,7 @@ public class ExecutableRequestCreatorTests { public static ExecutableRequestCreator createMock() { var mockCreator = mock(ExecutableRequestCreator.class); - when(mockCreator.create(anyList(), any(), any(), any(), any())).thenReturn(() -> {}); + when(mockCreator.create(any(), anyList(), any(), any(), any(), any())).thenReturn(() -> {}); return mockCreator; } @@ -38,7 +38,7 @@ public static ExecutableRequestCreator createMock(RequestSender requestSender, S doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; return (Runnable) () -> requestSender.send( mock(Logger.class), RequestTests.mockRequest(modelId), @@ -47,7 +47,7 @@ public static ExecutableRequestCreator createMock(RequestSender requestSender, S mock(ResponseHandler.class), listener ); - }).when(mockCreator).create(anyList(), any(), any(), any(), any()); + }).when(mockCreator).create(any(), anyList(), any(), any(), any(), any()); return mockCreator; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index 79b17f8dff29d..829a6f981db4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -107,7 +107,7 @@ public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception PlainActionFuture listener = new PlainActionFuture<>(); sender.send( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator(getUrl(webServer), null, "key", "model", null), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), listener ); @@ -138,7 +138,7 @@ public void testHttpRequestSender_Throws_WhenCallingSendBeforeStart() throws Exc PlainActionFuture listener = new PlainActionFuture<>(); var thrownException = expectThrows( AssertionError.class, - () -> sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), listener) + () -> sender.send(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), listener) ); assertThat(thrownException.getMessage(), is("call start() before sending a request")); } @@ -162,7 +162,12 @@ public void testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + sender.send( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -187,7 +192,12 @@ public void 
testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws sender.start(); PlainActionFuture listener = new PlainActionFuture<>(); - sender.send(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + sender.send( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index 5e88c3f1bb8f5..24a261dfe47c4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -70,7 +70,7 @@ public void testQueueSize_IsEmpty() { public void testQueueSize_IsOne() { var service = createRequestExecutorServiceWithMocks(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, new PlainActionFuture<>()); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); assertThat(service.queueSize(), is(1)); } @@ -110,7 +110,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { PlainActionFuture listener = new PlainActionFuture<>(); service.execute( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), - List.of(), + new DocumentsOnlyInput(List.of()), null, listener ); @@ -133,7 +133,7 @@ public void testSend_AfterShutdown_Throws() { service.shutdown(); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -149,13 +149,13 @@ public void testSend_Throws_WhenQueueIsFull() { "test_service", threadPool, null, - RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettings(1), + createRequestExecutorServiceSettings(1), new SingleRequestManager(mock(RetryingHttpSender.class)) ); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, new PlainActionFuture<>()); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, new PlainActionFuture<>()); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); @@ -180,7 +180,7 @@ public void testTaskThrowsError_CallsOnFailure() { service.execute( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), - List.of(), + new DocumentsOnlyInput(List.of()), null, listener ); @@ -207,7 +207,12 @@ public void testSend_CallsOnFailure_WhenRequestTimesOut() { var service = createRequestExecutorServiceWithMocks(); var listener = new 
PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), TimeValue.timeValueNanos(1), listener); + service.execute( + ExecutableRequestCreatorTests.createMock(), + new DocumentsOnlyInput(List.of()), + TimeValue.timeValueNanos(1), + listener + ); var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); @@ -265,7 +270,7 @@ public void onFailure(Exception e) { } }; - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); @@ -279,7 +284,7 @@ public void testSend_NotifiesTasksOfShutdown() { var service = createRequestExecutorServiceWithMocks(); var listener = new PlainActionFuture(); - service.execute(ExecutableRequestCreatorTests.createMock(), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(), new DocumentsOnlyInput(List.of()), null, listener); service.shutdown(); service.start(); @@ -387,11 +392,16 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, var settings = createRequestExecutorServiceSettings(1); var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -426,11 +436,21 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( var settings = createRequestExecutorServiceSettings(3); var service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); assertThat(service.queueSize(), is(3)); settings.setQueueCapacity(1); @@ -471,11 +491,16 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO var settings = createRequestExecutorServiceSettings(1); var 
service = new RequestExecutorService("test_service", threadPool, null, settings, new SingleRequestManager(requestSender)); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, new PlainActionFuture<>()); + service.execute( + ExecutableRequestCreatorTests.createMock(requestSender), + new DocumentsOnlyInput(List.of()), + null, + new PlainActionFuture<>() + ); assertThat(service.queueSize(), is(1)); PlainActionFuture listener = new PlainActionFuture<>(); - service.execute(ExecutableRequestCreatorTests.createMock(requestSender), List.of(), null, listener); + service.execute(ExecutableRequestCreatorTests.createMock(requestSender), new DocumentsOnlyInput(List.of()), null, listener); var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index 5c35d8ce49b60..14a7e28eb84db 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -60,7 +60,7 @@ public void testExecuting_DoesNotCallOnFailureForTimeout_AfterIllegalArgumentExc var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, listener @@ -80,7 +80,7 @@ public void testRequest_ReturnsTimeoutException() { PlainActionFuture listener = new PlainActionFuture<>(); var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -106,7 +106,7 @@ public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exceptio var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -138,7 +138,7 @@ public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, listener @@ -168,7 +168,7 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnResp var requestTask = new RequestTask( OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), - List.of("abc"), + new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, listener diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java new file mode 100644 index 0000000000000..441c0f15e8224 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntityTests.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.response.cohere;
+
+import org.apache.http.HttpResponse;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.inference.results.RankedDocsResults;
+import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.hamcrest.MatcherAssert;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+
+public class CohereRankedResponseEntityTests extends ESTestCase {
+
+    public void testResponseLiteral() throws IOException {
+        InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse(
+            new HttpResult(mock(HttpResponse.class), responseLiteral.getBytes(StandardCharsets.UTF_8))
+        );
+
+        MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
+        List<RankedDocsResults.RankedDoc> expected = responseLiteralDocs();
+        for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) {
+            assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index());
+        }
+    }
+
+    public void testGeneratedResponse() throws IOException {
+        int numDocs = randomIntBetween(1, 10);
+
+        List<RankedDocsResults.RankedDoc> expected = new ArrayList<>(numDocs);
+        StringBuilder responseBuilder = new StringBuilder();
+
+        responseBuilder.append("{");
+        responseBuilder.append("\"index\":\"").append(randomAlphaOfLength(36)).append("\",");
+        responseBuilder.append("\"results\": [");
+        List<Integer> indices = linear(numDocs);
+        List<Double> scores = linearDoubles(numDocs);
+        for (int i = 0; i < numDocs; i++) {
+            int index = indices.remove(randomInt(indices.size() - 1));
+
+            responseBuilder.append("{");
+            responseBuilder.append("\"index\":").append(index).append(",");
+            responseBuilder.append("\"relevance_score\":").append(scores.get(i).toString()).append("}");
+            expected.add(new RankedDocsResults.RankedDoc(String.valueOf(index), scores.get(i).toString(), null));
+            if (i < numDocs - 1) {
+                responseBuilder.append(",");
+            }
+        }
+        responseBuilder.append("],");
+        responseBuilder.append("\"meta\": {");
+        responseBuilder.append("\"api_version\": {");
+        responseBuilder.append("\"version\": \"1\"},");
+        responseBuilder.append("\"billed_units\": {");
+        responseBuilder.append("\"search_units\":").append(randomIntBetween(1, 10)).append("}}}");
+
+        InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse(
+            new HttpResult(mock(HttpResponse.class), responseBuilder.toString().getBytes(StandardCharsets.UTF_8))
+        );
+        MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class));
+        for (int i = 0; i < ((RankedDocsResults) parsedResults).getRankedDocs().size(); i++) {
+            assertEquals(((RankedDocsResults) parsedResults).getRankedDocs().get(i).index(), expected.get(i).index());
+        }
+    }
+
+    private ArrayList<RankedDocsResults.RankedDoc> responseLiteralDocs() {
+        var list = new ArrayList<RankedDocsResults.RankedDoc>();
+
+        list.add(new RankedDocsResults.RankedDoc("2", "0.98005307", null));
list.add(new RankedDocsResults.RankedDoc("3", "0.27904198", null)); + list.add(new RankedDocsResults.RankedDoc("0", "0.10194652", null)); + return list; + + }; + + private final String responseLiteral = """ + { + "index": "d0760819-5a73-4d58-b163-3956d3648b62", + "results": [ + { + "index": 2, + "relevance_score": 0.98005307 + }, + { + "index": 3, + "relevance_score": 0.27904198 + }, + { + "index": 0, + "relevance_score": 0.10194652 + } + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "search_units": 1 + } + } + } + """; + + public void testResponseLiteralWithDocuments() throws IOException { + InferenceServiceResults parsedResults = CohereRankedResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseLiteralWithDocuments.getBytes(StandardCharsets.UTF_8)) + ); + + MatcherAssert.assertThat(parsedResults, instanceOf(RankedDocsResults.class)); + MatcherAssert.assertThat(((RankedDocsResults) parsedResults).getRankedDocs(), is(responseLiteralDocsWithText)); + } + + private final String responseLiteralWithDocuments = """ + { + "index": "44873262-1315-4c06-8433-fdc90c9790d0", + "results": [ + { + "document": { + "text": "Washington, D.C.." + }, + "index": 2, + "relevance_score": 0.98005307 + }, + { + "document": { + "text": "Capital punishment has existed in the United States since beforethe United States was a country. " + }, + "index": 3, + "relevance_score": 0.27904198 + }, + { + "document": { + "text": "Carson City is the capital city of the American state of Nevada." + }, + "index": 0, + "relevance_score": 0.10194652 + } + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "search_units": 1 + } + } + } + """; + + private final List responseLiteralDocsWithText = List.of( + new RankedDocsResults.RankedDoc("2", "0.98005307", "Washington, D.C.."), + new RankedDocsResults.RankedDoc( + "3", + "0.27904198", + "Capital punishment has existed in the United States since beforethe United States was a country. 
" + ), + new RankedDocsResults.RankedDoc("0", "0.10194652", "Carson City is the capital city of the American state of Nevada.") + ); + + private ArrayList linear(int n) { + ArrayList list = new ArrayList<>(); + for (int i = 0; i <= n; i++) { + list.add(i); + } + return list; + } + + // creates a list of doubles of monotonically decreasing magnitude + private ArrayList linearDoubles(int n) { + ArrayList list = new ArrayList<>(); + double startValue = 1.0; + double decrement = startValue / n + 1; + for (int i = 0; i <= n; i++) { + list.add(startValue - i * decrement); + } + return list; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index 5c438644a18c5..33b8e10963c4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -113,9 +114,22 @@ protected void doInfer( } + @Override + protected void doInfer( + Model model, + @Nullable String query, + List input, + Map taskSettings, + InputType inputType, + ActionListener listener + ) { + + } + @Override protected void doChunkedInfer( Model model, + @Nullable String query, List input, Map taskSettings, InputType inputType, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 1491511b84bb3..ebfa8be65c6df 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -336,11 +336,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingResults_IsEmpty() doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(new TextEmbeddingResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -359,11 +359,11 @@ public void testGetEmbeddingSize_ReturnsError_WhenTextEmbeddingByteResults_IsEmp doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(new TextEmbeddingByteResults(List.of())); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -384,11 +384,11 @@ 
public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); @@ -408,11 +408,11 @@ public void testGetEmbeddingSize_ReturnsSize_ForTextEmbeddingByteResults() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[4]; + ActionListener listener = (ActionListener) invocation.getArguments()[5]; listener.onResponse(textEmbedding); return Void.TYPE; - }).when(service).infer(any(), any(), any(), any(), any()); + }).when(service).infer(any(), any(), any(), any(), any(), any()); PlainActionFuture listener = new PlainActionFuture<>(); getEmbeddingSize(model, service, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 97de300615b9b..fb45bb813fb3d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -620,7 +620,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException try (var service = new CohereService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat( @@ -679,7 +679,7 @@ public void testInfer_SendsRequest() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -914,7 +914,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); MatcherAssert.assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); @@ -965,7 +965,7 @@ public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsA null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -1031,6 +1031,7 @@ public void 
testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIs PlainActionFuture listener = new PlainActionFuture<>(); service.infer( model, + null, List.of("abc"), CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, null), InputType.INGEST, @@ -1098,7 +1099,7 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec null ); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.UNSPECIFIED, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.UNSPECIFIED, listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 80ceb855f9e94..87121ffa87c74 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -405,6 +405,7 @@ public void testChunkInfer() { service.chunkedInfer( model, + null, List.of("foo", "bar"), Map.of(), InputType.SEARCH, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index dbb50260edaf1..b35e75fd6786d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -384,6 +384,7 @@ public void testChunkInfer() { service.chunkedInfer( model, + null, List.of("foo", "bar"), Map.of(), InputType.SEARCH, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index cd896cb18440a..b88ecc66c77b3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -64,7 +64,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() throws IOExcep try (var service = new TestService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index b47956ff00c81..d72888ee9cd4b 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -435,7 +435,7 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -470,7 +470,7 @@ public void testInfer_SendsElserRequest() throws IOException { var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index a1718488be5d0..5ebb5ca274816 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -681,7 +681,7 @@ public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException try (var service = new OpenAiService(factory, createWithEmptySettings(threadPool))) { PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(mockModel, List.of(""), new HashMap<>(), InputType.INGEST, listener); + service.infer(mockModel, null, List.of(""), new HashMap<>(), InputType.INGEST, listener); var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -727,7 +727,7 @@ public void testInfer_SendsRequest() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var result = listener.actionGet(TIMEOUT); @@ -1152,7 +1152,7 @@ public void testInfer_UnauthorisedResponse() throws IOException { var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); - service.infer(model, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); + service.infer(model, null, List.of("abc"), new HashMap<>(), InputType.INGEST, listener); var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index 710ffe14b31e2..c657d80bccbcb 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -119,7 +119,14 @@ private void doInferenceServiceModel(CoordinatedInferenceAction.Request request, client, INFERENCE_ORIGIN, InferenceAction.INSTANCE, - new InferenceAction.Request(TaskType.ANY, request.getModelId(), request.getInputs(), request.getTaskSettings(), inputType), + new InferenceAction.Request( + TaskType.ANY, + request.getModelId(), + null, + request.getInputs(), + request.getTaskSettings(), + inputType + ), listener.delegateFailureAndWrap((l, r) -> l.onResponse(translateInferenceServiceResponse(r.getResults()))) ); } From 147f5a00a41d55dd2331ee896d85adec0665e54a Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 3 Apr 2024 21:23:51 +0200 Subject: [PATCH 011/173] ESQL: Introduce language versioning to REST API (#106824) For the _query endpoint, add a parameter for the ESQL language version to the JSON payload. For now, it is optional and is only validated with no further action. --- docs/changelog/106824.yaml | 5 + .../core/esql/action/EsqlQueryRequest.java | 3 + .../esql/action/EsqlQueryRequestBuilder.java | 2 + .../xpack/esql/action/EsqlQueryRequest.java | 51 ++++++- .../esql/action/EsqlQueryRequestBuilder.java | 6 + .../xpack/esql/action/RequestXContent.java | 6 +- .../xpack/esql/version/EsqlVersion.java | 111 +++++++++++++++ .../esql/action/EsqlQueryRequestTests.java | 130 +++++++++++++++++- .../xpack/esql/version/EsqlVersionTests.java | 81 +++++++++++ 9 files changed, 384 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/106824.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java diff --git a/docs/changelog/106824.yaml b/docs/changelog/106824.yaml new file mode 100644 index 0000000000000..0a2001df5039a --- /dev/null +++ b/docs/changelog/106824.yaml @@ -0,0 +1,5 @@ +pr: 106824 +summary: "ESQL: Introduce language versioning to REST API" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java index 9faa78d3b34f9..dcd89c200db26 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java @@ -21,6 +21,9 @@ protected EsqlQueryRequest(StreamInput in) throws IOException { super(in); } + // Use the unparsed version String, so we don't have to serialize a version object. 
+ public abstract String esqlVersion(); + public abstract String query(); public abstract QueryBuilder filter(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index a0a2bbc3bed19..acd44165cad65 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -35,6 +35,8 @@ public final ActionType action() { return action; } + public abstract EsqlQueryRequestBuilder esqlVersion(String esqlVersion); + public abstract EsqlQueryRequestBuilder query(String query); public abstract EsqlQueryRequestBuilder filter(QueryBuilder filter); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index e5ff790619d14..54ae2f4c90fc1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -20,6 +20,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.version.EsqlVersion; import java.io.IOException; import java.util.List; @@ -35,6 +36,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private boolean async; + private String esqlVersion; private String query; private boolean columnar; private boolean profile; @@ -45,6 +47,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private TimeValue waitForCompletionTimeout = DEFAULT_WAIT_FOR_COMPLETION; private TimeValue keepAlive = DEFAULT_KEEP_ALIVE; private boolean keepOnCompletion; + private boolean onSnapshotBuild = Build.current().isSnapshot(); static EsqlQueryRequest syncEsqlQueryRequest() { return new EsqlQueryRequest(false); @@ -65,17 +68,54 @@ public EsqlQueryRequest(StreamInput in) throws IOException { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; + if (Strings.hasText(esqlVersion) == false) { + // TODO: make this required + // "https://github.com/elastic/elasticsearch/issues/104890" + // validationException = addValidationError(invalidVersion("is required"), validationException); + } else { + EsqlVersion version = EsqlVersion.parse(esqlVersion); + if (version == null) { + validationException = addValidationError(invalidVersion("has invalid value [" + esqlVersion + "]"), validationException); + } else if (version == EsqlVersion.SNAPSHOT && onSnapshotBuild == false) { + validationException = addValidationError( + invalidVersion("with value [" + esqlVersion + "] only allowed in snapshot builds"), + validationException + ); + } + } if (Strings.hasText(query) == false) { - validationException = addValidationError("[query] is required", validationException); + validationException = addValidationError("[" + RequestXContent.QUERY_FIELD + "] is required", validationException); } - if (Build.current().isSnapshot() == false && pragmas.isEmpty() == false) { - validationException = addValidationError("[pragma] only allowed in snapshot builds", validationException); + 
if (onSnapshotBuild == false && pragmas.isEmpty() == false) { + validationException = addValidationError( + "[" + RequestXContent.PRAGMA_FIELD + "] only allowed in snapshot builds", + validationException + ); } return validationException; } + private static String invalidVersion(String reason) { + return "[" + + RequestXContent.ESQL_VERSION_FIELD + + "] " + + reason + + ", latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]"; + } + public EsqlQueryRequest() {} + public void esqlVersion(String esqlVersion) { + this.esqlVersion = esqlVersion; + } + + @Override + public String esqlVersion() { + return esqlVersion; + } + public void query(String query) { this.query = query; } @@ -174,4 +214,9 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, // Pass the query as the description return new CancellableTask(id, type, action, query, parentTaskId, headers); } + + // Setter for tests + void onSnapshotBuild(boolean onSnapshotBuild) { + this.onSnapshotBuild = onSnapshotBuild; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java index 7df5c95cbc953..511fbd9f1c275 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java @@ -29,6 +29,12 @@ private EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryRequest req super(client, EsqlQueryAction.INSTANCE, request); } + @Override + public EsqlQueryRequestBuilder esqlVersion(String esqlVersion) { + request.esqlVersion(esqlVersion); + return this; + } + @Override public EsqlQueryRequestBuilder query(String query) { request.query(query); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 8db940d5a4779..ef82f666ce904 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -46,10 +46,11 @@ final class RequestXContent { PARAM_PARSER.declareString(constructorArg(), TYPE); } - private static final ParseField QUERY_FIELD = new ParseField("query"); + static final ParseField ESQL_VERSION_FIELD = new ParseField("version"); + static final ParseField QUERY_FIELD = new ParseField("query"); private static final ParseField COLUMNAR_FIELD = new ParseField("columnar"); private static final ParseField FILTER_FIELD = new ParseField("filter"); - private static final ParseField PRAGMA_FIELD = new ParseField("pragma"); + static final ParseField PRAGMA_FIELD = new ParseField("pragma"); private static final ParseField PARAMS_FIELD = new ParseField("params"); private static final ParseField LOCALE_FIELD = new ParseField("locale"); private static final ParseField PROFILE_FIELD = new ParseField("profile"); @@ -72,6 +73,7 @@ static EsqlQueryRequest parseAsync(XContentParser parser) { } private static void objectParserCommon(ObjectParser parser) { + parser.declareString(EsqlQueryRequest::esqlVersion, ESQL_VERSION_FIELD); parser.declareString(EsqlQueryRequest::query, QUERY_FIELD); parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, 
c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java new file mode 100644 index 0000000000000..9f96ba0e64e17 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/version/EsqlVersion.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.version; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.VersionId; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.Map; + +public enum EsqlVersion implements VersionId { + /** + * Breaking changes go here until the next version is released. + */ + SNAPSHOT(Integer.MAX_VALUE, 12, 99, "📷"), + ROCKET(2024, 4, "🚀"); + + static final Map VERSION_MAP_WITH_AND_WITHOUT_EMOJI = versionMapWithAndWithoutEmoji(); + + private static Map versionMapWithAndWithoutEmoji() { + Map stringToVersion = new LinkedHashMap<>(EsqlVersion.values().length * 2); + + for (EsqlVersion version : EsqlVersion.values()) { + putVersionCheckNoDups(stringToVersion, version.versionStringWithoutEmoji(), version); + putVersionCheckNoDups(stringToVersion, version.toString(), version); + } + + return stringToVersion; + } + + private static void putVersionCheckNoDups(Map stringToVersion, String versionString, EsqlVersion version) { + EsqlVersion existingVersionForKey = stringToVersion.put(versionString, version); + if (existingVersionForKey != null) { + throw new IllegalArgumentException("Duplicate esql version with version string [" + versionString + "]"); + } + } + + /** + * Accepts a version string with the emoji suffix or without it. + * E.g. both "2024.04.01.🚀" and "2024.04.01" will be interpreted as {@link EsqlVersion#ROCKET}. 
+ */ + public static EsqlVersion parse(String versionString) { + return VERSION_MAP_WITH_AND_WITHOUT_EMOJI.get(versionString); + } + + public static EsqlVersion latestReleased() { + return Arrays.stream(EsqlVersion.values()).filter(v -> v != SNAPSHOT).max(Comparator.comparingInt(EsqlVersion::id)).get(); + } + + private int year; + private byte month; + private byte revision; + private String emoji; + + EsqlVersion(int year, int month, String emoji) { + this(year, month, 1, emoji); + } + + EsqlVersion(int year, int month, int revision, String emoji) { + if ((1 <= revision && revision <= 99) == false) { + throw new IllegalArgumentException("Version revision number must be between 1 and 99 but was [" + revision + "]"); + } + if ((1 <= month && month <= 12) == false) { + throw new IllegalArgumentException("Version month must be between 1 and 12 but was [" + month + "]"); + } + if ((emoji.codePointCount(0, emoji.length()) == 1) == false) { + throw new IllegalArgumentException("Version emoji must be a single unicode character but was [" + emoji + "]"); + } + this.year = year; + this.month = (byte) month; + this.revision = (byte) revision; + this.emoji = emoji; + } + + public int year() { + return year; + } + + public byte month() { + return month; + } + + public byte revision() { + return revision; + } + + public String emoji() { + return emoji; + } + + public String versionStringWithoutEmoji() { + return this == SNAPSHOT ? "snapshot" : Strings.format("%d.%02d.%02d", year, month, revision); + } + + @Override + public String toString() { + return versionStringWithoutEmoji() + "." + emoji; + } + + @Override + public int id() { + return this == SNAPSHOT ? Integer.MAX_VALUE : (10000 * year + 100 * month + revision); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 5b16691bcee77..44066ff3d091d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -24,6 +24,8 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.parser.TypedParamValue; +import org.elasticsearch.xpack.esql.version.EsqlVersion; +import org.elasticsearch.xpack.esql.version.EsqlVersionTests; import java.io.IOException; import java.util.ArrayList; @@ -44,20 +46,23 @@ public void testParseFields() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); + EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; StringBuilder paramsString = paramsString(params, hasParams); String json = String.format(Locale.ROOT, """ { + "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", "filter": %s - %s""", query, columnar, locale.toLanguageTag(), filter, paramsString); + %s""", esqlVersion, query, columnar, locale.toLanguageTag(), filter, paramsString); EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -75,6 +80,7 @@ public void 
testParseFieldsForAsync() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); + EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; @@ -86,6 +92,7 @@ public void testParseFieldsForAsync() throws IOException { Locale.ROOT, """ { + "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", @@ -94,6 +101,7 @@ public void testParseFieldsForAsync() throws IOException { "wait_for_completion_timeout": "%s", "keep_alive": "%s" %s""", + esqlVersion, query, columnar, locale.toLanguageTag(), @@ -106,6 +114,7 @@ public void testParseFieldsForAsync() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequestAsync(json); + assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -149,18 +158,123 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testMissingQueryIsNotValidation() throws IOException { + public void testKnownVersionIsValid() throws IOException { + for (EsqlVersion version : EsqlVersion.values()) { + String validVersionString = randomBoolean() ? version.versionStringWithoutEmoji() : version.toString(); + + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, validVersionString); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNull(request.validate()); + + request = parseEsqlQueryRequestAsync(json); + assertNull(request.validate()); + } + } + + public void testUnknownVersionIsNotValid() throws IOException { + String invalidVersionString = EsqlVersionTests.randomInvalidVersionString(); + + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, invalidVersionString); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] has invalid value [" + + invalidVersionString + + "], latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]" + ) + ); + } + + public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { + String esqlVersion = randomBoolean() ? "snapshot" : "snapshot.📷"; + String json = String.format(Locale.ROOT, """ + { + "version": "%s", + "query": "ROW x = 1" + } + """, esqlVersion); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + request.onSnapshotBuild(true); + assertNull(request.validate()); + + request.onSnapshotBuild(false); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] with value [" + + esqlVersion + + "] only allowed in snapshot builds, latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]" + ) + ); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") + public void testMissingVersionIsNotValid() throws IOException { + String missingVersion = randomBoolean() ? 
"" : ", \"version\": \"\""; + String json = String.format(Locale.ROOT, """ + { + "columnar": true, + "query": "row x = 1" + %s + }""", missingVersion); + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + assertNotNull(request.validate()); + assertThat( + request.validate().getMessage(), + containsString( + "[version] is required, latest available version is [" + EsqlVersion.latestReleased().versionStringWithoutEmoji() + "]" + ) + ); + } + + public void testMissingQueryIsNotValid() throws IOException { String json = """ { - "columnar": true + "columnar": true, + "version": "snapshot" }"""; - EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); assertNotNull(request.validate()); assertThat(request.validate().getMessage(), containsString("[query] is required")); + } + + public void testPragmasOnlyValidOnSnapshot() throws IOException { + String json = """ + { + "version": "2024.04.01", + "query": "ROW x = 1", + "pragma": {"foo": "bar"} + } + """; + + EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + request.onSnapshotBuild(true); + assertNull(request.validate()); - request = parseEsqlQueryRequestAsync(json); + request.onSnapshotBuild(false); assertNotNull(request.validate()); - assertThat(request.validate().getMessage(), containsString("[query] is required")); + assertThat(request.validate().getMessage(), containsString("[pragma] only allowed in snapshot builds")); } public void testTask() throws IOException { @@ -260,6 +374,10 @@ private static void assertParserErrorMessage(String json, String message) { assertThat(e.getMessage(), containsString(message)); } + static EsqlQueryRequest parseEsqlQueryRequest(String json, boolean sync) throws IOException { + return sync ? parseEsqlQueryRequestSync(json) : parseEsqlQueryRequestAsync(json); + } + static EsqlQueryRequest parseEsqlQueryRequestSync(String json) throws IOException { var request = parseEsqlQueryRequest(json, RequestXContent::parseSync); assertFalse(request.async()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java new file mode 100644 index 0000000000000..cd4fd77a8dd22 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.version; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class EsqlVersionTests extends ESTestCase { + public void testLatestReleased() { + assertThat(EsqlVersion.latestReleased(), is(EsqlVersion.ROCKET)); + } + + public void testVersionString() { + assertThat(EsqlVersion.SNAPSHOT.toString(), equalTo("snapshot.📷")); + assertThat(EsqlVersion.ROCKET.toString(), equalTo("2024.04.01.🚀")); + } + + public void testVersionId() { + assertThat(EsqlVersion.SNAPSHOT.id(), equalTo(Integer.MAX_VALUE)); + assertThat(EsqlVersion.ROCKET.id(), equalTo(20240401)); + + for (EsqlVersion version : EsqlVersion.values()) { + assertTrue(EsqlVersion.SNAPSHOT.onOrAfter(version)); + if (version != EsqlVersion.SNAPSHOT) { + assertTrue(version.before(EsqlVersion.SNAPSHOT)); + } else { + assertTrue(version.onOrAfter(EsqlVersion.SNAPSHOT)); + } + } + + List versionsSortedAsc = Arrays.stream(EsqlVersion.values()) + .sorted(Comparator.comparing(EsqlVersion::year).thenComparing(EsqlVersion::month).thenComparing(EsqlVersion::revision)) + .toList(); + for (int i = 0; i < versionsSortedAsc.size() - 1; i++) { + assertTrue(versionsSortedAsc.get(i).before(versionsSortedAsc.get(i + 1))); + } + } + + public void testVersionStringNoEmoji() { + for (EsqlVersion version : EsqlVersion.values()) { + String[] versionSegments = version.toString().split("\\."); + String[] parsingPrefixSegments = Arrays.copyOf(versionSegments, versionSegments.length - 1); + + String expectedParsingPrefix = String.join(".", parsingPrefixSegments); + assertThat(version.versionStringWithoutEmoji(), equalTo(expectedParsingPrefix)); + } + } + + public void testParsing() { + for (EsqlVersion version : EsqlVersion.values()) { + String versionStringWithoutEmoji = version.versionStringWithoutEmoji(); + + assertThat(EsqlVersion.parse(versionStringWithoutEmoji), is(version)); + assertThat(EsqlVersion.parse(versionStringWithoutEmoji + "." + version.emoji()), is(version)); + } + + assertNull(EsqlVersion.parse(randomInvalidVersionString())); + } + + public static String randomInvalidVersionString() { + String[] invalidVersionString = new String[1]; + + do { + int length = randomIntBetween(1, 10); + invalidVersionString[0] = randomAlphaOfLength(length); + } while (EsqlVersion.VERSION_MAP_WITH_AND_WITHOUT_EMOJI.containsKey(invalidVersionString[0])); + + return invalidVersionString[0]; + } +} From a37debdbea8b98ae8d785eb56a45e8436aa51239 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Wed, 3 Apr 2024 16:40:41 -0500 Subject: [PATCH 012/173] Ensure getUser() is the logical user, not API key creator for RCS 2.0 (#107023) This commit changes SecurityContext#getUser() to provide the original user that initiated the call when run across clusters for RCS 2.0. Before this change the getUser() would provide the RCS 2.0 API key creator as the current user. 
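For readability, this is the full getUser() method as it reads once the patch below is applied, with the two nested if statements collapsed into a single condition (a condensed sketch of the diff, not additional code from the commit):

    @Nullable
    public User getUser() {
        Authentication authentication = getAuthentication();
        if (authentication != null && authentication.isCrossClusterAccess()) {
            // RCS 2.0: unwrap the cross-cluster access metadata so the effective
            // user is the one that initiated the call, not the API key creator
            authentication = getAuthenticationFromCrossClusterAccessMetadata(authentication);
        }
        return authentication == null ? null : authentication.getEffectiveSubject().getUser();
    }
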
--- .../xpack/core/security/SecurityContext.java | 6 ++++++ .../xpack/security/SecurityContextTests.java | 15 +++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java index e13102796ae48..05ef5d3f70fd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityContext.java @@ -35,6 +35,7 @@ import java.util.function.Consumer; import java.util.function.Function; +import static org.elasticsearch.xpack.core.security.authc.Authentication.getAuthenticationFromCrossClusterAccessMetadata; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.AUTHENTICATION_KEY; import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.AUTHORIZATION_INFO_KEY; @@ -71,6 +72,11 @@ public User requireUser() { @Nullable public User getUser() { Authentication authentication = getAuthentication(); + if (authentication != null) { + if (authentication.isCrossClusterAccess()) { + authentication = getAuthenticationFromCrossClusterAccessMetadata(authentication); + } + } return authentication == null ? null : authentication.getEffectiveSubject().getUser(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java index 07c858f10f447..22488334d85c0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityContextTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; +import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.ParentActionAuthorization; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; @@ -78,6 +79,20 @@ public void testGetAuthenticationAndUser() throws IOException { assertEquals(user, securityContext.getUser()); } + public void testGetUserForAPIKeyBasedCrossCluster() throws IOException { + final User user = new User("test"); + final CrossClusterAccessSubjectInfo crossClusterAccessSubjectInfo = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo( + AuthenticationTestHelper.builder().user(user).realmRef(new RealmRef("ldap", "foo", "node1")).build(false) + ); + final Authentication authentication = AuthenticationTestHelper.builder() + .crossClusterAccess(randomAlphaOfLengthBetween(10, 20), crossClusterAccessSubjectInfo) + .build(false); + User apiKeyUser = authentication.getEffectiveSubject().getUser(); + authentication.writeToContext(threadContext); + assertEquals(user, securityContext.getUser()); + assertNotEquals(apiKeyUser, securityContext.getUser()); + } + public void testGetAuthenticationDoesNotSwallowIOException() { threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, ""); // an intentionally corrupt header final SecurityContext 
securityContext = new SecurityContext(Settings.EMPTY, threadContext); From 72a824819b7d8e8982904d75f10f8f0ab054843a Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 4 Apr 2024 07:05:18 +0200 Subject: [PATCH 013/173] [Profiling] Use default task cancellation check (#107037) With this commit we remove our custom implementation of whether a task has been cancelled and instead use the standard implementation that is already provided by the task API. --- .../TransportGetStackTracesAction.java | 21 ++----------------- 1 file changed, 2 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index aa5f3efb179a2..8fff0dab53b08 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; @@ -155,22 +154,6 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL } } - /** - * Checks whether a task has been cancelled and notifies the provided listener if required. - * @param task The task to check. May be a cancelable task. - * @param listener Listener to notify. - * @return true iff the task has been cancelled. Callers must terminate as early as possible. - */ - private boolean mayNotifyOfCancellation(Task task, ActionListener listener) { - if (task instanceof CancellableTask && ((CancellableTask) task).isCancelled()) { - log.info("{} got cancelled.", task); - listener.onFailure(new TaskCancelledException("get stacktraces task cancelled")); - return true; - } else { - return false; - } - } - private void searchProfilingEvents( Task submitTask, Client client, @@ -447,7 +430,7 @@ private void retrieveStackTraces( GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener ) { - if (mayNotifyOfCancellation(submitTask, submitListener)) { + if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { return; } List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet()); @@ -670,7 +653,7 @@ private void retrieveStackTraceDetails( List executableIds, ActionListener submitListener ) { - if (mayNotifyOfCancellation(submitTask, submitListener)) { + if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { return; } List stackFrameIndices = resolver.resolve( From 9a2f8a80eb729c33b33a819a6246d03459e168c9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 07:37:13 +0100 Subject: [PATCH 014/173] Add remote cluster network troubleshooting docs (#107072) Spells out in a little more detail our expectations for remote cluster connections, including an example log message when the network is unreliable and some suggestions for how to troubleshoot further. 
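To make the keepalive advice in the docs below concrete, here is a self-contained Java sketch of what enabling and tuning TCP keepalives looks like at the JDK socket level. It is illustrative only: the endpoint matches the example log message in the docs, and the timing values are placeholders rather than recommendations from this patch.

    import java.io.IOException;
    import java.net.Socket;
    import jdk.net.ExtendedSocketOptions;

    public class KeepaliveSketch {
        public static void main(String[] args) throws IOException {
            // placeholder endpoint, taken from the example log message in the docs
            try (Socket socket = new Socket("192.168.0.42", 9443)) {
                socket.setKeepAlive(true); // SO_KEEPALIVE: probe idle connections
                // finer-grained tuning, available on Linux and macOS with JDK 11+
                socket.setOption(ExtendedSocketOptions.TCP_KEEPIDLE, 300);    // first probe after 5 minutes idle
                socket.setOption(ExtendedSocketOptions.TCP_KEEPINTERVAL, 30); // then probe every 30 seconds
                socket.setOption(ExtendedSocketOptions.TCP_KEEPCOUNT, 6);     // declare the peer dead after 6 missed probes
            }
        }
    }

With settings in this spirit, the operating system reports a dead peer within minutes instead of hours, which is exactly the slow-detection failure mode the new troubleshooting section warns about.
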
--- .../remote-clusters-troubleshooting.asciidoc | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc index f7b08b40bb7ef..df3c54794dc06 100644 --- a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc @@ -77,6 +77,46 @@ org.elasticsearch.transport.ConnectTransportException: [][192.168.0.42:9443] *co server is enabled>> on the remote cluster. * Ensure no firewall is blocking the communication. +[[remote-clusters-unreliable-network]] +===== Remote cluster connection is unreliable + +====== Symptom + +The local cluster can connect to the remote cluster, but the connection does +not work reliably. For example, some cross-cluster requests may succeed while +others report connection errors, time out, or appear to be stuck waiting for +the remote cluster to respond. + +When {es} detects that the remote cluster connection is not working, it will +report the following message in its logs: +[source,txt,subs=+quotes] +---- +[2023-06-28T16:36:47,264][INFO ][o.e.t.ClusterConnectionManager] [local-node] transport connection to [{my-remote#192.168.0.42:9443}{...}] closed by remote +---- +This message will also be logged if the node of the remote cluster to which +{es} is connected is shut down or restarted. + +Note that with some network configurations it could take minutes or hours for +the operating system to detect that a connection has stopped working. Until the +failure is detected and reported to {es}, requests involving the remote cluster +may time out or may appear to be stuck. + +====== Resolution + +* Ensure that the network between the clusters is as reliable as possible. + +* Ensure that the network is configured to permit <>. + +* Ensure that the network is configured to detect faulty connections quickly. + In particular, you must enable and fully support TCP keepalives, and set a + short <>. + +* On Linux systems, execute `ss -tonie` to verify the details of the + configuration of each network connection between the clusters. + +* If the problems persist, capture network packets at both ends of the + connection and analyse the traffic to look for delays and lost messages. + [[remote-clusters-troubleshooting-tls-trust]] ===== TLS trust not established From daa90069305cdf5c837c5cfd404fd88464956ee5 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 08:17:14 +0100 Subject: [PATCH 015/173] Apply snapshot `?after` filter inline (#107003) In `TransportGetSnapshotsAction` today we build a list of all candidate snapshots and then copy them into another list to apply the `?after` filter. With this commit we construct the final filtered list directly. 
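Reduced to a sketch, the new accumulation logic looks like this (the helper name collectMatching is hypothetical; matchesPredicates, afterPredicate and addResults are the names used in the diff below):

    // Condensed, illustrative sketch of the pattern applied in loadSnapshotInfos() below.
    private void collectMatching(Iterable<SnapshotInfo> candidates) {
        int repositoryTotalCount = 0;
        List<SnapshotInfo> snapshots = new ArrayList<>();
        for (SnapshotInfo snapshotInfo : candidates) {
            if (matchesPredicates(snapshotInfo)) {
                repositoryTotalCount++;                  // every match counts toward the response total
                if (afterPredicate.test(snapshotInfo)) {
                    snapshots.add(snapshotInfo);         // ?after filter applied inline, no second list
                }
            }
        }
        addResults(repositoryTotalCount, snapshots);     // records totalCount and resultsCount
    }

One pass, one list: the intermediate copy that applyAfterPredicateAndAdd() used to make is gone, and the new resultsCount counter lets buildResponse() size the final list exactly.
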
--- .../get/TransportGetSnapshotsAction.java | 82 ++++++++++++------- 1 file changed, 52 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index cb4942cc0efb8..190c4c565f1b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -179,8 +179,18 @@ private class GetSnapshotsOperation { // results private final Map failuresByRepository = ConcurrentCollections.newConcurrentMap(); private final Queue> allSnapshotInfos = ConcurrentCollections.newQueue(); + + /** + * Accumulates number of snapshots that match the name/fromSortValue/slmPolicy predicates, to be returned in the response. + */ private final AtomicInteger totalCount = new AtomicInteger(); + /** + * Accumulates the number of snapshots that match the name/fromSortValue/slmPolicy/after predicates, for sizing the final result + * list. + */ + private final AtomicInteger resultsCount = new AtomicInteger(); + GetSnapshotsOperation( CancellableTask cancellableTask, ResolvedRepositories resolvedRepositories, @@ -261,7 +271,7 @@ void getMultipleReposSnapshotInfo(ActionListener listener) } }) - .addListener(listener.map(ignored -> buildResponse())); + .addListener(listener.map(ignored -> buildResponse()), executor, threadPool.getThreadContext()); } private boolean skipRepository(String repositoryName) { @@ -306,7 +316,7 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD } if (verbose) { - snapshots(repo, toResolve.stream().map(Snapshot::getSnapshotId).toList(), listener); + loadSnapshotInfos(repo, toResolve.stream().map(Snapshot::getSnapshotId).toList(), listener); } else { assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; @@ -321,10 +331,11 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD } } - private void snapshots(String repositoryName, Collection snapshotIds, ActionListener listener) { + private void loadSnapshotInfos(String repositoryName, Collection snapshotIds, ActionListener listener) { if (cancellableTask.notifyIfCancelled(listener)) { return; } + final AtomicInteger repositoryTotalCount = new AtomicInteger(); final List snapshots = new ArrayList<>(snapshotIds.size()); final Set snapshotIdsToIterate = new HashSet<>(snapshotIds); // first, look at the snapshots in progress @@ -337,7 +348,10 @@ private void snapshots(String repositoryName, Collection snapshotIds if (snapshotIdsToIterate.remove(entry.snapshot().getSnapshotId())) { final SnapshotInfo snapshotInfo = SnapshotInfo.inProgress(entry); if (matchesPredicates(snapshotInfo)) { - snapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + repositoryTotalCount.incrementAndGet(); + if (afterPredicate.test(snapshotInfo)) { + snapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } } } } @@ -372,7 +386,10 @@ private void snapshots(String repositoryName, Collection snapshotIds @Override public void onResponse(SnapshotInfo snapshotInfo) { if (matchesPredicates(snapshotInfo)) { - syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + 
repositoryTotalCount.incrementAndGet(); + if (afterPredicate.test(snapshotInfo)) { + syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } } refListener.onResponse(null); } @@ -398,11 +415,16 @@ public void onFailure(Exception e) { } }) - .addListener(listener.safeMap(v -> { - // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - applyAfterPredicateAndAdd(snapshots); - return null; - }), executor, threadPool.getThreadContext()); + // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here + .andThenAccept(ignored -> addResults(repositoryTotalCount.get(), snapshots)) + + .addListener(listener); + } + + private void addResults(int repositoryTotalCount, List snapshots) { + totalCount.addAndGet(repositoryTotalCount); + resultsCount.addAndGet(snapshots.size()); + allSnapshotInfos.add(snapshots); } private void addSimpleSnapshotInfos( @@ -413,15 +435,19 @@ private void addSimpleSnapshotInfos( ) { if (repositoryData == null) { // only want current snapshots - applyAfterPredicateAndAdd(currentSnapshots); + addResults(currentSnapshots.size(), currentSnapshots.stream().filter(afterPredicate).toList()); return; } // else want non-current snapshots as well, which are found in the repository data - List snapshotInfos = new ArrayList<>(); + List snapshotInfos = new ArrayList<>(currentSnapshots.size() + toResolve.size()); + int repositoryTotalCount = 0; for (SnapshotInfo snapshotInfo : currentSnapshots) { assert snapshotInfo.startTime() == 0L && snapshotInfo.endTime() == 0L && snapshotInfo.totalShards() == 0L : snapshotInfo; if (toResolve.remove(snapshotInfo.snapshot())) { - snapshotInfos.add(snapshotInfo); + repositoryTotalCount += 1; + if (afterPredicate.test(snapshotInfo)) { + snapshotInfos.add(snapshotInfo); + } } } Map> snapshotsToIndices = new HashMap<>(); @@ -435,22 +461,19 @@ private void addSimpleSnapshotInfos( } } for (Snapshot snapshot : toResolve) { - snapshotInfos.add( - new SnapshotInfo( - snapshot, - snapshotsToIndices.getOrDefault(snapshot.getSnapshotId(), Collections.emptyList()), - Collections.emptyList(), - Collections.emptyList(), - repositoryData.getSnapshotState(snapshot.getSnapshotId()) - ) + final var snapshotInfo = new SnapshotInfo( + snapshot, + snapshotsToIndices.getOrDefault(snapshot.getSnapshotId(), Collections.emptyList()), + Collections.emptyList(), + Collections.emptyList(), + repositoryData.getSnapshotState(snapshot.getSnapshotId()) ); + repositoryTotalCount += 1; + if (afterPredicate.test(snapshotInfo)) { + snapshotInfos.add(snapshotInfo); + } } - applyAfterPredicateAndAdd(snapshotInfos); - } - - private void applyAfterPredicateAndAdd(List snapshotInfos) { - allSnapshotInfos.add(snapshotInfos.stream().filter(afterPredicate).toList()); - totalCount.addAndGet(snapshotInfos.size()); + addResults(repositoryTotalCount, snapshotInfos); } private GetSnapshotsResponse buildResponse() { @@ -463,11 +486,10 @@ private GetSnapshotsResponse buildResponse() { .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); final List snapshotInfos; - if (size == GetSnapshotsRequest.NO_LIMIT) { + if (size == GetSnapshotsRequest.NO_LIMIT || resultsCount.get() <= size) { snapshotInfos = resultsStream.toList(); } else { - final var allocateSize = Math.min(size, 1000); // ignore excessively-large sizes in request params - snapshotInfos = new ArrayList<>(allocateSize); + snapshotInfos = new ArrayList<>(size); for (var iterator = 
resultsStream.iterator(); iterator.hasNext();) { final var snapshotInfo = iterator.next(); if (snapshotInfos.size() < size) { From 996a164bd572e07961964112cf7b6ac18d9d92f4 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 4 Apr 2024 09:47:14 +0200 Subject: [PATCH 016/173] Set visibility of failure_store param of Rollover API to feature_flag (#107061) --- .../main/resources/rest-api-spec/api/indices.rollover.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index e3c06ab080597..e04786ec14cf7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -66,7 +66,9 @@ }, "failure_store":{ "type":"boolean", - "description":"If set to true, the rollover action will be applied on the failure store of the data stream." + "description":"If set to true, the rollover action will be applied on the failure store of the data stream.", + "visibility": "feature_flag", + "feature_flag": "es.failure_store_feature_flag_enabled" } }, "body":{ From 7b254218fb048a0e9245f05b4b08cc57ef5755e7 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Thu, 4 Apr 2024 09:48:35 +0200 Subject: [PATCH 017/173] Add ES|QL signum function (#106866) * Add ES|QL signum function * Update docs/changelog/106866.yaml * Skip csv tests for versions older than 8.14 * Reference layout docs file and fix instructions for adding functions * Break csv specs by param type * More tests --- docs/changelog/106866.yaml | 5 + .../functions/description/signum.asciidoc | 5 + .../esql/functions/examples/signum.asciidoc | 13 +++ .../esql/functions/layout/signum.asciidoc | 15 +++ .../esql/functions/math-functions.asciidoc | 2 + .../esql/functions/parameters/signum.asciidoc | 6 + .../esql/functions/signature/signum.svg | 1 + .../esql/functions/types/signum.asciidoc | 12 ++ .../src/main/resources/floats.csv-spec | 47 ++++++++ .../src/main/resources/ints.csv-spec | 67 +++++++++++ .../src/main/resources/math.csv-spec | 13 +++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/unsigned_long.csv-spec | 31 +++++ .../scalar/math/SignumDoubleEvaluator.java | 108 +++++++++++++++++ .../scalar/math/SignumIntEvaluator.java | 110 ++++++++++++++++++ .../scalar/math/SignumLongEvaluator.java | 110 ++++++++++++++++++ .../math/SignumUnsignedLongEvaluator.java | 110 ++++++++++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../function/scalar/math/Signum.java | 102 ++++++++++++++++ .../function/scalar/package-info.java | 21 +++- .../xpack/esql/io/stream/PlanNamedTypes.java | 3 + .../function/scalar/math/SignumTests.java | 82 +++++++++++++ 22 files changed, 864 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/106866.yaml create mode 100644 docs/reference/esql/functions/description/signum.asciidoc create mode 100644 docs/reference/esql/functions/examples/signum.asciidoc create mode 100644 docs/reference/esql/functions/layout/signum.asciidoc create mode 100644 docs/reference/esql/functions/parameters/signum.asciidoc create mode 100644 docs/reference/esql/functions/signature/signum.svg create mode 100644 docs/reference/esql/functions/types/signum.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java create mode 
100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java diff --git a/docs/changelog/106866.yaml b/docs/changelog/106866.yaml new file mode 100644 index 0000000000000..ffc34e5962850 --- /dev/null +++ b/docs/changelog/106866.yaml @@ -0,0 +1,5 @@ +pr: 106866 +summary: Add ES|QL signum function +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/description/signum.asciidoc b/docs/reference/esql/functions/description/signum.asciidoc new file mode 100644 index 0000000000000..db44c019e247e --- /dev/null +++ b/docs/reference/esql/functions/description/signum.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. diff --git a/docs/reference/esql/functions/examples/signum.asciidoc b/docs/reference/esql/functions/examples/signum.asciidoc new file mode 100644 index 0000000000000..190c1d0f71136 --- /dev/null +++ b/docs/reference/esql/functions/examples/signum.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=signum] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=signum-result] +|=== + diff --git a/docs/reference/esql/functions/layout/signum.asciidoc b/docs/reference/esql/functions/layout/signum.asciidoc new file mode 100644 index 0000000000000..f5b565993f392 --- /dev/null +++ b/docs/reference/esql/functions/layout/signum.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-signum]] +=== `SIGNUM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/signum.svg[Embedded,opts=inline] + +include::../parameters/signum.asciidoc[] +include::../description/signum.asciidoc[] +include::../types/signum.asciidoc[] +include::../examples/signum.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 8748b35443e8e..dd5b8a0a3d4e0 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -23,6 +23,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -46,6 +47,7 @@ include::layout/log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] include::round.asciidoc[] +include::layout/signum.asciidoc[] include::layout/sin.asciidoc[] include::layout/sinh.asciidoc[] include::sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/signum.asciidoc b/docs/reference/esql/functions/parameters/signum.asciidoc new file mode 100644 index 0000000000000..65013f4c21265 --- /dev/null +++ b/docs/reference/esql/functions/parameters/signum.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/signature/signum.svg b/docs/reference/esql/functions/signature/signum.svg new file mode 100644 index 0000000000000..76d2972f18f42 --- /dev/null +++ b/docs/reference/esql/functions/signature/signum.svg @@ -0,0 +1 @@ +SIGNUM(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/signum.asciidoc b/docs/reference/esql/functions/types/signum.asciidoc new file mode 100644 index 0000000000000..7cda278abdb56 --- /dev/null +++ b/docs/reference/esql/functions/types/signum.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 9c343083275cd..0882fec5ec0bf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -544,3 +544,50 @@ required_feature: esql.agg_values [1.56, 1.78] | Tech Lead [1.7, 1.83, 2.05] | null ; + +signumOfPositiveDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(100) | eval s = signum(d); + +d:double | s:double +100 | 1.0 +; + +signumOfNegativeDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(-100) | eval s = signum(d); + +d:double | s:double +-100 | -1.0 +; + +signumOfZeroDouble#[skip:-8.13.99,reason:new scalar function added in 8.14] +row d = to_double(0) | eval s = signum(d); + +d:double | s:double +0 | 0.0 +; + +signumWithEvalWhereAndStats#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from employees +| where emp_no <= 10009 +| eval s = signum(mv_min(salary_change)) +| where signum(mv_max(salary_change)) >= 0 +| STATS x = AVG(signum(60000 - salary)); + +x:double +0.14285714285714285 +; + +signumWithEvalAndSort#[skip:-8.13.99,reason:new scalar function added in 8.14] +from employees +| eval s = signum(mv_min(salary_change)) +| where signum(mv_max(salary_change)) >= 0 +| keep s, emp_no, salary, salary_change +| sort s, emp_no +| limit 3; + +s:double | emp_no:integer | salary:integer | salary_change:double +-1.0 | 10002 | 56371 | [-7.23, 11.17] +-1.0 | 10004 | 36174 | [-0.35, 1.13, 3.65, 13.48] +-1.0 | 10005 | 63528 | [-2.14, 13.07] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 8657602e7b16f..3e1d1b19a7f67 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -988,3 +988,70 @@ required_feature: esql.agg_values [3, 5] | Tech Lead [1, 4] | null ; + +signumOfPositiveInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = 100 | eval s = signum(i); + +i:integer | s:double +100 | 1.0 +; + +signumOfNegativeInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = -100 | eval s = signum(i); + +i:integer | s:double +-100 | -1.0 +; + +signumOfZeroInteger#[skip:-8.13.99,reason:new scalar function added in 8.14] +row i = 0 | eval s = signum(i); + +i:integer | s:double +0 | 0.0 +; + +signumOfPositiveLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(100) | eval s = signum(l); + +l:long | s:double +100 | 1.0 +; + +signumOfNegativeLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(-100) | eval s = signum(l); + +l:long | s:double +-100 | -1.0 +; + +signumOfZeroLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_long(0) | eval s = signum(l); + +l:long | s:double +0 | 0.0 +; + +signumWithEvalWhereAndStats#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from employees +| eval s = signum(mv_min(salary_change.int)) +| where signum(mv_max(salary_change.int)) >= 0 +| STATS x=AVG(signum(60000 - salary)); + +x:double +0.5409836065573771 +; + 
+signumWithEvalAndSort#[skip:-8.13.99,reason:new scalar function added in 8.14] +from employees +| eval s = signum(60000 - salary) +| where signum(salary - 55000) >= 0 +| keep s, emp_no, salary +| sort s DESC, salary ASC +| limit 3; + +s:double | emp_no:integer | salary:integer +1.0 | 10052 | 55360 +1.0 | 10002 | 56371 +1.0 | 10041 | 56415 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index 8491919b3ee93..6caeade1af58c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1249,6 +1249,19 @@ i:ul | c:ul | f:ul 1000000000000000000 | 1000000000000000000 | 1000000000000000000 ; +signum#[skip:-8.13.99,reason:new scalar function added in 8.14] +// tag::signum[] +ROW d = 100.0 +| EVAL s = SIGNUM(d) +// end::signum[] +; + +// tag::signum-result[] +d: double | s:double +100 | 1.0 +// end::signum-result[] +; + sqrt // tag::sqrt[] ROW d = 100.0 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 524de7c2c3b67..746684aca3e38 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -58,6 +58,7 @@ double pi() "keyword right(string:keyword|text, length:integer)" "double round(number:double, ?decimals:integer)" "keyword|text rtrim(string:keyword|text)" +"double signum(number:double|integer|long|unsigned_long)" "double sin(angle:double|integer|long|unsigned_long)" "double sinh(angle:double|integer|long|unsigned_long)" "keyword split(string:keyword|text, delim:keyword|text)" @@ -165,6 +166,7 @@ replace |[string, regex, newString] |["keyword|text", "keyword|te right |[string, length] |["keyword|text", integer] |[, ] round |[number, decimals] |[double, integer] |[The numeric value to round, The number of decimal places to round to. Defaults to 0.] rtrim |string |"keyword|text" |[""] +signum |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." sin |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. sinh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. split |[string, delim] |["keyword|text", "keyword|text"] |[, ] @@ -273,6 +275,7 @@ replace |The function substitutes in the string any match of the regular e right |Return the substring that extracts length chars from the string starting from the right. round |Rounds a number to the closest number with the specified number of digits. rtrim |Removes trailing whitespaces from a string. +signum |Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. split |Split a single valued string into multiple strings. 
@@ -382,6 +385,7 @@ replace |keyword right |keyword |[false, false] |false |false round |double |[false, true] |false |false rtrim |"keyword|text" |false |false |false +signum |double |false |false |false sin |double |false |false |false sinh |double |false |false |false split |keyword |[false, false] |false |false @@ -443,5 +447,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -99 | 99 | 99 +100 | 100 | 100 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index c6f24d876240f..f1a15f41af7b3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -187,3 +187,34 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc bytes_in:ul | rad:double 16002960716282089759 | 2.79304354566432608E17 ; + +signumOfPositiveUnsignedLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_ul(100) | eval s = signum(l); + +l:ul | s:double +100 | 1.0 +; + +signumOfZeroUnsignedLong#[skip:-8.13.99,reason:new scalar function added in 8.14] +row l = to_ul(0) | eval s = signum(l); + +l:ul | s:double +0 | 0.0 +; + +signumWithEvalAndWhere#[skip:-8.13.99,reason:new scalar function added in 8.14] + +from ul_logs | +where signum(bytes_in) >= 0.0 | +eval s = signum(bytes_out) | +keep s, bytes_in, bytes_out | +sort bytes_out, s | +limit 2; + +warning:Line 2:7: evaluation of [signum(bytes_in)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:7: java.lang.IllegalArgumentException: single-value function encountered multi-value + +s:double | bytes_in:ul | bytes_out:ul +1.0 | 1957665857956635540 | 352442273299370793 +1.0 | 2408213296071189837 | 419872666232023984 +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java new file mode 100644 index 0000000000000..c7d21a7b9c5a0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumDoubleEvaluator.java @@ -0,0 +1,108 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. 
+ */ +public final class SignumDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock valBlock = (DoubleBlock) val.eval(page)) { + DoubleVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getDouble(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, DoubleVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getDouble(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumDoubleEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumDoubleEvaluator get(DriverContext context) { + return new SignumDoubleEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumDoubleEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java new file mode 100644 index 0000000000000..939807d8deffa --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumIntEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. + */ +public final class SignumIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (IntBlock valBlock = (IntBlock) val.eval(page)) { + IntVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, IntBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getInt(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, IntVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumIntEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumIntEvaluator get(DriverContext context) { + return new SignumIntEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumIntEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java new file mode 100644 index 0000000000000..0c4af4671672a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. + */ +public final class SignumLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.process(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.process(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumLongEvaluator get(DriverContext context) { + return new SignumLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java new file mode 100644 index 0000000000000..d3b20c98139c4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumUnsignedLongEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Signum}. + * This class is generated. Do not edit it. 
+ */ +public final class SignumUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator val; + + private final DriverContext driverContext; + + public SignumUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.val = val; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock valBlock = (LongBlock) val.eval(page)) { + LongVector valVector = valBlock.asVector(); + if (valVector == null) { + return eval(page.getPositionCount(), valBlock); + } + return eval(page.getPositionCount(), valVector).asBlock(); + } + } + + public DoubleBlock eval(int positionCount, LongBlock valBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendDouble(Signum.processUnsignedLong(valBlock.getLong(valBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public DoubleVector eval(int positionCount, LongVector valVector) { + try(DoubleVector.Builder result = driverContext.blockFactory().newDoubleVectorBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + result.appendDouble(Signum.processUnsignedLong(valVector.getLong(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SignumUnsignedLongEvaluator[" + "val=" + val + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(val); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SignumUnsignedLongEvaluator get(DriverContext context) { + return new SignumUnsignedLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SignumUnsignedLongEvaluator[" + "val=" + val + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 9f0976e0045d3..1a27c7b69c1e6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; @@ -152,6 +153,7 @@ private FunctionDefinition[][] functions() { def(Pi.class, Pi::new, "pi"), def(Pow.class, Pow::new, "pow"), def(Round.class, Round::new, "round"), + def(Signum.class, Signum::new, "signum"), def(Sin.class, Sin::new, "sin"), def(Sinh.class, Sinh::new, "sinh"), def(Sqrt.class, Sqrt::new, "sqrt"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java new file mode 100644 index 0000000000000..ede41c10f3ac2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.util.List; +import java.util.function.Function; + +public class Signum extends UnaryScalarFunction { + @FunctionInfo( + returnType = { "double" }, + description = "Returns the sign of the given number.\n" + + "It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + examples = @Example(file = "math", tag = "signum") + ) + public Signum( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." 
+ ) Expression n + ) { + super(source, n); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + var field = toEvaluator.apply(field()); + var fieldType = field().dataType(); + + if (fieldType == DataTypes.DOUBLE) { + return new SignumDoubleEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.INTEGER) { + return new SignumIntEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.LONG) { + return new SignumLongEvaluator.Factory(source(), field); + } + if (fieldType == DataTypes.UNSIGNED_LONG) { + return new SignumUnsignedLongEvaluator.Factory(source(), field); + } + + throw EsqlIllegalArgumentException.illegalDataType(fieldType); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Signum(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Signum::new, field()); + } + + @Override + public DataType dataType() { + return DataTypes.DOUBLE; + } + + @Evaluator(extraName = "Double") + static double process(double val) { + return Math.signum(val); + } + + @Evaluator(extraName = "Int") + static double process(int val) { + return Math.signum(val); + } + + @Evaluator(extraName = "Long") + static double process(long val) { + return Math.signum(val); + } + + @Evaluator(extraName = "UnsignedLong") + static double processUnsignedLong(long val) { + return Math.signum(NumericUtils.unsignedLongToDouble(val)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index f30425158b1b3..9469889285fd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -127,12 +127,21 @@ *
  • * Generate a syntax diagram and a table with supported types by running the tests via * gradle: {@code ./gradlew x-pack:plugin:esql:test} - * The generated files can be found here - * {@code docs/reference/esql/functions/signature/myfunction.svg } - * and here - * {@code docs/reference/esql/functions/types/myfunction.asciidoc} - * Make sure to commit them and reference them in your doc file. There are plenty of examples on how - * to reference those files e.g. {@code docs/reference/esql/functions/sin.asciidoc}. + * The generated files are + *
+ * <ol>
+ *     <li>{@code docs/reference/esql/functions/description/myfunction.asciidoc}</li>
+ *     <li>{@code docs/reference/esql/functions/examples/myfunction.asciidoc}</li>
+ *     <li>{@code docs/reference/esql/functions/layout/myfunction.asciidoc}</li>
+ *     <li>{@code docs/reference/esql/functions/parameters/myfunction.asciidoc}</li>
+ *     <li>{@code docs/reference/esql/functions/signature/myfunction.svg}</li>
+ *     <li>{@code docs/reference/esql/functions/types/myfunction.asciidoc}</li>
+ * </ol>
+ *
+ * Make sure to commit them. Add a reference to the
+ * {@code docs/reference/esql/functions/layout/myfunction.asciidoc} in the function list
+ * docs. There are plenty of examples on how
+ * to reference those files e.g. if you are writing a Math function, you will want to
+ * list it in {@code docs/reference/esql/functions/math-functions.asciidoc}.
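+ *
+ * For instance, such a reference is an asciidoc include. A hypothetical entry in that
+ * list (illustrative only; copy the style of the existing entries for the exact path) is:
+ * {@code include::layout/myfunction.asciidoc[]}
 *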
  • *
  • * Build the docs by cloning the docs repo diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 21c17110ad4fe..a85ddac532241 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -80,6 +80,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; @@ -349,6 +350,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Log10.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, LTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, RTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, Signum.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sin.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sinh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Sqrt.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -1296,6 +1298,7 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(LTrim.class), LTrim::new), entry(name(RTrim.class), RTrim::new), entry(name(Neg.class), Neg::new), + entry(name(Signum.class), Signum::new), entry(name(Sin.class), Sin::new), entry(name(Sinh.class), Sinh::new), entry(name(Sqrt.class), Sqrt::new), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java new file mode 100644 index 0000000000000..4167029010950 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public class SignumTests extends AbstractFunctionTestCase { + public SignumTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + String read = "Attribute[channel=0]"; + List suppliers = new ArrayList<>(); + TestCaseSupplier.forUnaryInt( + suppliers, + "SignumIntEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + i -> (double) Math.signum(i), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + TestCaseSupplier.forUnaryLong( + suppliers, + "SignumLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + l -> (double) Math.signum(l), + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "SignumUnsignedLongEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + ul -> Math.signum(NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "SignumDoubleEvaluator[val=" + read + "]", + DataTypes.DOUBLE, + Math::signum, + -Double.MAX_VALUE, + Double.MAX_VALUE, + List.of() + ); + + suppliers = anyNullIsNull(true, suppliers); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new Signum(source, args.get(0)); + } +} From a32512fe0f588e2a03dd3892aef0bed5828fbc9d Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 10:51:15 +0200 Subject: [PATCH 018/173] Connector API: Followup on #106060 (#107058) --- .../connector/ConnectorConfiguration.java | 66 ++++++++++--------- .../ConfigurationValidation.java | 6 +- 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java index fc2c0920f49df..75dba46f8e29c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -45,10 +46,14 @@ public class ConnectorConfiguration implements Writeable, ToXContentObject { @Nullable private final String category; + @Nullable private final Object defaultValue; + @Nullable private final List dependsOn; + @Nullable private 
final ConfigurationDisplayType display; private final String label; + @Nullable private final List options; @Nullable private final Integer order; @@ -58,9 +63,13 @@ public class ConnectorConfiguration implements Writeable, ToXContentObject { private final boolean sensitive; @Nullable private final String tooltip; + @Nullable private final ConfigurationFieldType type; + @Nullable private final List uiRestrictions; + @Nullable private final List validations; + @Nullable private final Object value; /** @@ -380,41 +389,38 @@ public void writeTo(StreamOutput out) throws IOException { public Map toMap() { Map map = new HashMap<>(); - if (category != null) { - map.put(CATEGORY_FIELD.getPreferredName(), category); - } + + Optional.ofNullable(category).ifPresent(c -> map.put(CATEGORY_FIELD.getPreferredName(), c)); map.put(DEFAULT_VALUE_FIELD.getPreferredName(), defaultValue); - if (dependsOn != null) { - map.put(DEPENDS_ON_FIELD.getPreferredName(), dependsOn.stream().map(ConfigurationDependency::toMap).toList()); - } - if (display != null) { - map.put(DISPLAY_FIELD.getPreferredName(), display.toString()); - } + + Optional.ofNullable(dependsOn) + .ifPresent(d -> map.put(DEPENDS_ON_FIELD.getPreferredName(), d.stream().map(ConfigurationDependency::toMap).toList())); + + Optional.ofNullable(display).ifPresent(d -> map.put(DISPLAY_FIELD.getPreferredName(), d.toString())); + map.put(LABEL_FIELD.getPreferredName(), label); - if (options != null) { - map.put(OPTIONS_FIELD.getPreferredName(), options.stream().map(ConfigurationSelectOption::toMap).toList()); - } - if (order != null) { - map.put(ORDER_FIELD.getPreferredName(), order); - } - if (placeholder != null) { - map.put(PLACEHOLDER_FIELD.getPreferredName(), placeholder); - } + + Optional.ofNullable(options) + .ifPresent(o -> map.put(OPTIONS_FIELD.getPreferredName(), o.stream().map(ConfigurationSelectOption::toMap).toList())); + + Optional.ofNullable(order).ifPresent(o -> map.put(ORDER_FIELD.getPreferredName(), o)); + + Optional.ofNullable(placeholder).ifPresent(p -> map.put(PLACEHOLDER_FIELD.getPreferredName(), p)); + map.put(REQUIRED_FIELD.getPreferredName(), required); map.put(SENSITIVE_FIELD.getPreferredName(), sensitive); - if (tooltip != null) { - map.put(TOOLTIP_FIELD.getPreferredName(), tooltip); - } - if (type != null) { - map.put(TYPE_FIELD.getPreferredName(), type.toString()); - } - if (uiRestrictions != null) { - map.put(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); - } - if (validations != null) { - map.put(VALIDATIONS_FIELD.getPreferredName(), validations.stream().map(ConfigurationValidation::toMap).toList()); - } + + Optional.ofNullable(tooltip).ifPresent(t -> map.put(TOOLTIP_FIELD.getPreferredName(), t)); + + Optional.ofNullable(type).ifPresent(t -> map.put(TYPE_FIELD.getPreferredName(), t.toString())); + + Optional.ofNullable(uiRestrictions).ifPresent(u -> map.put(UI_RESTRICTIONS_FIELD.getPreferredName(), u)); + + Optional.ofNullable(validations) + .ifPresent(v -> map.put(VALIDATIONS_FIELD.getPreferredName(), v.stream().map(ConfigurationValidation::toMap).toList())); + map.put(VALUE_FIELD.getPreferredName(), value); + return map; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java index 51e912650bc1d..4ed5a22d2a4bf 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -103,10 +102,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public Map toMap() { - Map map = new HashMap<>(); - map.put(CONSTRAINT_FIELD.getPreferredName(), constraint); - map.put(TYPE_FIELD.getPreferredName(), type.toString()); - return map; + return Map.of(CONSTRAINT_FIELD.getPreferredName(), constraint, TYPE_FIELD.getPreferredName(), type.toString()); } public static ConfigurationValidation fromXContent(XContentParser parser) throws IOException { From d6582cf1afcd460767bc4f60f8270fcfef5066c4 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 4 Apr 2024 12:42:23 +0200 Subject: [PATCH 019/173] Address concurrency issue in top hits aggregation (#106990) Top hits aggregation runs the fetch phase concurrently when the query phase is executed across multiple slices. This is problematic as the fetch phase does not support concurrent execution yet. The core of the issue is that the search execution context is shared across slices, which call setLookupProviders against it concurrently, setting each time different instances of preloaded source and field lookup providers. This makes us cross streams between slices, and hit lucene assertions that ensure that stored fields loaded from a certain thread are not read from a different thread. We have not hit this before because the problem revolves around SearchLookup which is used by runtime fields. TopHitsIT is the main test we have for top hits agg, but it uses a mock script engine which bypasses painless and SearchLookup. 
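To make the race concrete, here is a minimal, self-contained sketch of the failure
mode — plain Java with hypothetical names, not Elasticsearch code: SharedContext
stands in for the shared search execution context, and the String provider for the
preloaded source/field lookup providers that each slice installs.

    import java.util.concurrent.CountDownLatch;

    // Hypothetical stand-in for the search execution context that all slices share
    // (illustration only, not the real SearchExecutionContext).
    class SharedContext {
        private volatile String lookupProvider; // last writer wins, whichever slice it was

        void setLookupProvider(String provider) {
            this.lookupProvider = provider;
        }

        String lookupProvider() {
            return lookupProvider;
        }
    }

    public class CrossedStreamsDemo {
        public static void main(String[] args) throws InterruptedException {
            SharedContext shared = new SharedContext(); // one context for every slice
            CountDownLatch bothPublished = new CountDownLatch(2);

            Runnable slice = () -> {
                String mine = "provider-for-" + Thread.currentThread().getName();
                shared.setLookupProvider(mine); // concurrent slices overwrite each other
                bothPublished.countDown();
                try {
                    bothPublished.await();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
                // Once both slices have published, at least one of them now reads the
                // provider installed by the other slice.
                if (mine.equals(shared.lookupProvider()) == false) {
                    System.out.println(Thread.currentThread().getName() + " fetches through another slice's provider");
                }
            };

            Thread slice0 = new Thread(slice, "slice-0");
            Thread slice1 = new Thread(slice, "slice-1");
            slice0.start();
            slice1.start();
            slice0.join();
            slice1.join();
        }
    }

The change below avoids this by forking the search execution context per slice, so
each fetch installs its providers on a private copy instead of the shared instance.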
--- docs/changelog/106990.yaml | 5 ++ .../bucket/terms/RareTermsIT.java | 39 ++++++++++++ .../bucket/terms/StringTermsIT.java | 51 +++++++++++++++ .../aggregations/metrics/TopHitsIT.java | 62 +++++++++++++++---- .../metrics/TopHitsAggregator.java | 17 ++++- .../search/fetch/FetchPhase.java | 5 ++ .../search/internal/SubSearchContext.java | 21 ++++++- .../terms/RareTermsAggregatorTests.java | 45 -------------- .../bucket/terms/TermsAggregatorTests.java | 54 ---------------- 9 files changed, 184 insertions(+), 115 deletions(-) create mode 100644 docs/changelog/106990.yaml diff --git a/docs/changelog/106990.yaml b/docs/changelog/106990.yaml new file mode 100644 index 0000000000000..26646e742a5ee --- /dev/null +++ b/docs/changelog/106990.yaml @@ -0,0 +1,5 @@ +pr: 106990 +summary: Address concurrency issue in top hits aggregation +area: Aggregations +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java index 2dccda385bf53..c45cabf425b14 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsIT.java @@ -12,12 +12,22 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matchers; +import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; /** * Test that index enough data to trigger the creation of Cuckoo filters. 
@@ -64,4 +74,33 @@ private void assertNumRareTerms(int maxDocs, int rareTerms) { } ); } + + public void testGlobalAggregationWithScore() { + createIndex("global", Settings.EMPTY, "_doc", "keyword", "type=keyword"); + prepareIndex("global").setSource("keyword", "a").setRefreshPolicy(IMMEDIATE).get(); + prepareIndex("global").setSource("keyword", "c").setRefreshPolicy(IMMEDIATE).get(); + prepareIndex("global").setSource("keyword", "e").setRefreshPolicy(IMMEDIATE).get(); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new RareTermsAggregationBuilder("terms").field("keyword") + .subAggregation( + new RareTermsAggregationBuilder("sub_terms").field("keyword") + .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); + assertNoFailuresAndResponse(client().prepareSearch("global").addAggregation(globalBuilder), response -> { + InternalGlobal result = response.getAggregations().get("global"); + InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); + assertThat(terms.getBuckets().size(), equalTo(3)); + for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { + InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); + assertThat(subTerms.getBuckets().size(), equalTo(1)); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); + assertThat(topHits.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : topHits.getHits()) { + assertThat(hit.getScore(), greaterThan(0f)); + } + } + }); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 1b2d66fc12c76..662744ddfe77e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -18,16 +18,24 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; +import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import 
org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -63,6 +71,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsNull.notNullValue; @@ -1376,4 +1385,46 @@ private void assertOrderByKeyResponse( } ); } + + public void testGlobalAggregationWithScore() throws Exception { + assertAcked(prepareCreate("global").setMapping("keyword", "type=keyword")); + indexRandom( + true, + prepareIndex("global").setSource("keyword", "a"), + prepareIndex("global").setSource("keyword", "c"), + prepareIndex("global").setSource("keyword", "e") + ); + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); + GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( + new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation( + new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("keyword") + .order(BucketOrder.key(true)) + .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) + ) + ); + assertNoFailuresAndResponse(prepareSearch("global").addAggregation(globalBuilder), response -> { + InternalGlobal result = response.getAggregations().get("global"); + InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); + assertThat(terms.getBuckets().size(), equalTo(3)); + for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { + InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); + assertThat(subTerms.getBuckets().size(), equalTo(1)); + MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); + InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); + assertThat(topHits.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : topHits.getHits()) { + assertThat(hit.getScore(), greaterThan(0f)); + } + } + }); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 6cf274cb69fb3..991fe98612e3d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.ArrayUtil; @@ -20,6 +21,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.MockScriptEngine; 
import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; @@ -34,8 +36,13 @@ import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; +import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import org.elasticsearch.search.lookup.FieldLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.search.sort.SortBuilders; @@ -43,6 +50,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -87,7 +95,7 @@ public class TopHitsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(CustomScriptPlugin.class); + return List.of(CustomScriptPlugin.class, FetchPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { @@ -110,7 +118,7 @@ public static String randomExecutionHint() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword")); + assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword", "text", "type=text,store=true")); assertAcked(prepareCreate("field-collapsing").setMapping("group", "type=keyword")); createIndex("empty"); assertAcked( @@ -592,7 +600,7 @@ public void testFieldCollapsing() throws Exception { ); } - public void testFetchFeatures() { + public void testFetchFeatures() throws IOException { final boolean seqNoAndTerm = randomBoolean(); assertNoFailuresAndResponse( prepareSearch("idx").setQuery(matchQuery("text", "text").queryName("test")) @@ -642,19 +650,14 @@ public void testFetchFeatures() { assertThat(hit.getMatchedQueries()[0], equalTo("test")); - DocumentField field1 = hit.field("field1"); - assertThat(field1.getValue(), equalTo(5L)); - - DocumentField field2 = hit.field("field2"); - assertThat(field2.getValue(), equalTo(2.71f)); - - assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); - - field2 = hit.field("script"); - assertThat(field2.getValue().toString(), equalTo("5")); + assertThat(hit.field("field1").getValue(), equalTo(5L)); + assertThat(hit.field("field2").getValue(), equalTo(2.71f)); + assertThat(hit.field("script").getValue().toString(), equalTo("5")); assertThat(hit.getSourceAsMap().size(), equalTo(1)); assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); + assertEquals("some text to entertain", hit.getFields().get("text").getValue()); + assertEquals("some text to entertain", hit.getFields().get("text_stored_lookup").getValue()); } } ); @@ -1263,4 +1266,37 @@ public void testWithRescore() { } ); } + + public static class FetchPlugin extends Plugin implements SearchPlugin { + @Override + public List getFetchSubPhases(FetchPhaseConstructionContext context) { + return Collections.singletonList(fetchContext -> { + if 
(fetchContext.getIndexName().equals("idx")) { + return new FetchSubPhaseProcessor() { + + private LeafSearchLookup leafSearchLookup; + + @Override + public void setNextReader(LeafReaderContext ctx) { + leafSearchLookup = fetchContext.getSearchExecutionContext().lookup().getLeafSearchLookup(ctx); + } + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + leafSearchLookup.setDocument(hitContext.docId()); + FieldLookup fieldLookup = leafSearchLookup.fields().get("text"); + hitContext.hit() + .setDocumentField("text_stored_lookup", new DocumentField("text_stored_lookup", fieldLookup.getValues())); + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + }; + } + return null; + }); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 75f5c472c6665..92fb09b017b2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.common.util.LongObjectPagedHashMap.Cursor; import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -191,8 +192,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[i] = topDocs.scoreDocs[i].doc; } - subSearchContext.fetchPhase().execute(subSearchContext, docIdsToLoad); - FetchSearchResult fetchResult = subSearchContext.fetchResult(); + FetchSearchResult fetchResult = runFetchPhase(subSearchContext, docIdsToLoad); if (fetchProfiles != null) { fetchProfiles.add(fetchResult.profileResult()); } @@ -216,6 +216,19 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE ); } + private static FetchSearchResult runFetchPhase(SubSearchContext subSearchContext, int[] docIdsToLoad) { + // Fork the search execution context for each slice, because the fetch phase does not support concurrent execution yet. 
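+        // Without the fork, every slice would share one context and the fetch phase would
+        // call setLookupProviders on it concurrently, crossing the preloaded source and
+        // field lookup providers between slices.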
+ SearchExecutionContext searchExecutionContext = new SearchExecutionContext(subSearchContext.getSearchExecutionContext()); + SubSearchContext fetchSubSearchContext = new SubSearchContext(subSearchContext) { + @Override + public SearchExecutionContext getSearchExecutionContext() { + return searchExecutionContext; + } + }; + fetchSubSearchContext.fetchPhase().execute(fetchSubSearchContext, docIdsToLoad); + return fetchSubSearchContext.fetchResult(); + } + @Override public InternalTopHits buildEmptyAggregation() { TopDocs topDocs; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index c106d9b6f4cb2..2fa3e903a0074 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -104,6 +104,11 @@ private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Pr PreloadedSourceProvider sourceProvider = new PreloadedSourceProvider(); PreloadedFieldLookupProvider fieldLookupProvider = new PreloadedFieldLookupProvider(); + // The following relies on the fact that we fetch sequentially one segment after another, from a single thread + // This needs to be revised once we add concurrency to the fetch phase, and needs a work-around for situations + // where we run fetch as part of the query phase, where inter-segment concurrency is leveraged. + // One problem is the global setLookupProviders call against the shared execution context. + // Another problem is that the above provider implementations are not thread-safe context.getSearchExecutionContext().setLookupProviders(sourceProvider, ctx -> fieldLookupProvider); List processors = getProcessors(context.shardTarget(), fetchContext, profiler); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index 8567677aca30a..f31b319882b5a 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -29,7 +29,7 @@ public class SubSearchContext extends FilteredSearchContext { // By default return 3 hits per bucket. A higher default would make the response really large by default, since - // the to hits are returned per bucket. + // the top hits are returned per bucket. 
private static final int DEFAULT_SIZE = 3; private int from; @@ -62,6 +62,25 @@ public SubSearchContext(SearchContext context) { this.querySearchResult = new QuerySearchResult(); } + public SubSearchContext(SubSearchContext subSearchContext) { + this((SearchContext) subSearchContext); + this.from = subSearchContext.from; + this.size = subSearchContext.size; + this.sort = subSearchContext.sort; + this.parsedQuery = subSearchContext.parsedQuery; + this.query = subSearchContext.query; + this.storedFields = subSearchContext.storedFields; + this.scriptFields = subSearchContext.scriptFields; + this.fetchSourceContext = subSearchContext.fetchSourceContext; + this.docValuesContext = subSearchContext.docValuesContext; + this.fetchFieldsContext = subSearchContext.fetchFieldsContext; + this.highlight = subSearchContext.highlight; + this.explain = subSearchContext.explain; + this.trackScores = subSearchContext.trackScores; + this.version = subSearchContext.version; + this.seqNoAndPrimaryTerm = subSearchContext.seqNoAndPrimaryTerm; + } + @Override public void preProcess() {} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 2d240f74b91a4..dff5c090f818e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -49,8 +48,6 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests; @@ -72,7 +69,6 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; public class RareTermsAggregatorTests extends AggregatorTestCase { @@ -334,47 +330,6 @@ public void testInsideTerms() throws IOException { } } - public void testGlobalAggregationWithScore() throws IOException { - try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - Document document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("a"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("c"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", 
new BytesRef("e"))); - indexWriter.addDocument(document); - try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( - new RareTermsAggregationBuilder("terms").field("keyword") - .subAggregation( - new RareTermsAggregationBuilder("sub_terms").field("keyword") - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) - ) - ); - - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - - InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); - InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); - assertThat(terms.getBuckets().size(), equalTo(3)); - for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { - InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); - assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); - InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : topHits.getHits()) { - assertThat(hit.getScore(), greaterThan(0f)); - } - } - } - } - } - } - public void testWithNestedAggregations() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 183d1d0ab6ed0..788249fee1187 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -76,7 +76,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.StringFieldScript; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregationExecutionException; @@ -91,8 +90,6 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -1308,57 +1305,6 @@ public void testMixLongAndDouble() throws Exception { } } - public void testGlobalAggregationWithScore() throws IOException { - try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - Document document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("a"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new 
SortedDocValuesField("keyword", new BytesRef("c"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedDocValuesField("keyword", new BytesRef("e"))); - indexWriter.addDocument(document); - try (DirectoryReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); - Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); - GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( - new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword") - .order(BucketOrder.key(true)) - .subAggregation( - new TermsAggregationBuilder("sub_terms").userValueTypeHint(ValueType.STRING) - .executionHint(executionHint) - .collectMode(collectionMode) - .field("keyword") - .order(BucketOrder.key(true)) - .subAggregation(new TopHitsAggregationBuilder("top_hits").storedField("_none_")) - ) - ); - - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); - - InternalGlobal result = searchAndReduce(indexReader, new AggTestConfig(globalBuilder, fieldType)); - InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); - assertThat(terms.getBuckets().size(), equalTo(3)); - for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { - InternalMultiBucketAggregation subTerms = bucket.getAggregations().get("sub_terms"); - assertThat(subTerms.getBuckets().size(), equalTo(1)); - MultiBucketsAggregation.Bucket subBucket = subTerms.getBuckets().get(0); - InternalTopHits topHits = subBucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : topHits.getHits()) { - assertThat(hit.getScore(), greaterThan(0f)); - } - } - } - } - } - } - public void testWithNestedAggregations() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { From e33fd1b5a4d72c332d92e0513f75487672f9c280 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 14:06:12 +0200 Subject: [PATCH 020/173] [Connector API] Support numeric for configuration select option value type (#107059) --- docs/changelog/107059.yaml | 5 ++ .../335_connector_update_configuration.yml | 40 ++++++++++++++++ .../ConfigurationSelectOption.java | 25 ++++++---- .../ConnectorConfigurationTests.java | 48 +++++++++++++++++++ 4 files changed, 110 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/107059.yaml diff --git a/docs/changelog/107059.yaml b/docs/changelog/107059.yaml new file mode 100644 index 0000000000000..6c7ee48f9b53b --- /dev/null +++ b/docs/changelog/107059.yaml @@ -0,0 +1,5 @@ +pr: 107059 +summary: "[Connector API] Support numeric for configuration select option value type" +area: Application +type: bug +issues: [] diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 0bde4bafaffd4..418a3cf6de94a 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -161,6 +161,46 @@ setup: - match: { configuration.some_field.tooltip: null } +--- +"Update Connector Configuration with numeric select options": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: + - label: ten + value: 10 + - label: five + value: 5 + order: 4 + required: true + sensitive: false + tooltip: null + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.options.0.value: 10 } + - match: { configuration.some_field.options.1.value: 5 } + --- "Update Connector Configuration - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java index 3c17f97ead51d..9728faaac3dd4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java @@ -11,9 +11,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -25,16 +27,16 @@ public class ConfigurationSelectOption implements Writeable, ToXContentObject { private final String label; - private final String value; + private final Object value; - private ConfigurationSelectOption(String label, String value) { + private ConfigurationSelectOption(String label, Object value) { this.label = label; this.value = value; } public ConfigurationSelectOption(StreamInput in) throws IOException { this.label = in.readString(); - this.value = in.readString(); + this.value = in.readGenericValue(); } private static final ParseField LABEL_FIELD = new ParseField("label"); @@ -43,12 +45,19 @@ public ConfigurationSelectOption(StreamInput in) throws IOException { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "connector_configuration_select_option", true, - args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue((String) args[1]).build() + args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue(args[1]).build() ); static { PARSER.declareString(constructorArg(), LABEL_FIELD); - PARSER.declareString(constructorArg(), VALUE_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } + throw new 
XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); } @Override @@ -76,7 +85,7 @@ public static ConfigurationSelectOption fromXContent(XContentParser parser) thro @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(label); - out.writeString(value); + out.writeGenericValue(value); } @Override @@ -95,14 +104,14 @@ public int hashCode() { public static class Builder { private String label; - private String value; + private Object value; public Builder setLabel(String label) { this.label = label; return this; } - public Builder setValue(String value) { + public Builder setValue(Object value) { this.value = value; return this; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java index 3a7ff819ecbf5..caedb526b0b7e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -89,6 +89,54 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContent_WithNumericSelectOptions() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [ + { + "label": "five", + "value": 5 + }, + { + "label": "ten", + "value": 10 + } + ], + "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 0, + "type": "greater_than" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + public void testToXContentCrawlerConfig_WithNullValue() throws IOException { String content = XContentHelper.stripWhitespace(""" { From 90351ef63903c0ea5453d27b14575dbb2d07e6aa Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 14:04:14 +0100 Subject: [PATCH 021/173] Capture hot threads during slow cluster state application (#107087) Today we emit a warning when applying the cluster state takes more than 30s by default. Experience shows that it's almost always one task that exceeds the limit, and usually that task is the `IndicesClusterStateService` (see #89821). This commit adds a `DEBUG` logger that will capture a thread dump if a task is running for longer than a configurable duration so we can get more insight into the cause of the slow execution. 
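
For reference, a minimal sketch of how this diagnostic could be enabled on a
running cluster (both the new timeout setting and the `logger.*` setting are
dynamic; the `10s` value is just an illustration, the default is `30s`):

    PUT _cluster/settings
    {
      "persistent": {
        "logger.org.elasticsearch.cluster.service.ClusterApplierRecordingService": "DEBUG",
        "cluster.service.slow_task_thread_dump_timeout": "10s"
      }
    }

The thread dump is only captured while the `DEBUG` logger is enabled, and only
for tasks that are still running once the timeout elapses.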
--- .../ClusterApplierRecordingService.java | 55 ++++++++-- .../service/ClusterApplierService.java | 21 ++-- .../common/settings/ClusterSettings.java | 1 + .../ClusterApplierRecordingServiceTests.java | 101 ++++++++++++++---- .../ml/integration/AnomalyJobCRUDIT.java | 3 +- .../AutodetectResultProcessorIT.java | 3 +- .../ml/integration/EstablishedMemUsageIT.java | 3 +- .../integration/JobModelSnapshotCRUDIT.java | 3 +- .../ml/integration/JobResultsProviderIT.java | 3 +- .../integration/JobStorageDeletionTaskIT.java | 3 +- ...sportGetTrainedModelsStatsActionTests.java | 3 +- .../ml/datafeed/DatafeedJobBuilderTests.java | 3 +- .../InferenceProcessorFactoryTests.java | 3 +- .../persistence/JobResultsPersisterTests.java | 3 +- .../OpenJobPersistentTasksExecutorTests.java | 1 + .../ResultsPersisterServiceTests.java | 3 +- .../slm/SnapshotLifecycleServiceTests.java | 74 +++++++------ 17 files changed, 201 insertions(+), 85 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java index b1a2726e468e9..e7bd3b938504e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java @@ -7,7 +7,14 @@ */ package org.elasticsearch.cluster.service; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Stats.Recording; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -16,6 +23,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.monitor.jvm.HotThreads; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,10 +37,11 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.LongSupplier; public final class ClusterApplierRecordingService { + private static final Logger logger = LogManager.getLogger(ClusterApplierRecordingService.class); + private final Map recordedActions = new HashMap<>(); synchronized Stats getStats() { @@ -59,13 +69,16 @@ synchronized void updateStats(Recorder recorder) { static final class Recorder { private String currentAction; - private long startTimeMS; + private long startMillis; private boolean recording; + private SubscribableListener currentListener; private final List> recordings = new LinkedList<>(); - private final LongSupplier currentTimeSupplier; + private final ThreadPool threadPool; + private final TimeValue debugLoggingTimeout; - Recorder(LongSupplier currentTimeSupplier) { - this.currentTimeSupplier = currentTimeSupplier; + Recorder(ThreadPool threadPool, TimeValue debugLoggingTimeout) { + this.threadPool = threadPool; + this.debugLoggingTimeout = debugLoggingTimeout; } Releasable record(String action) { @@ -75,14 +88,40 @@ Releasable record(String action) { this.recording = 
true; this.currentAction = action; - this.startTimeMS = currentTimeSupplier.getAsLong(); + this.startMillis = threadPool.rawRelativeTimeInMillis(); + + if (logger.isDebugEnabled()) { + currentListener = new SubscribableListener<>(); + currentListener.addTimeout(debugLoggingTimeout, threadPool, threadPool.generic()); + currentListener.addListener(new ActionListener<>() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + assert e instanceof ElasticsearchTimeoutException : e; // didn't complete in time + HotThreads.logLocalHotThreads( + logger, + Level.DEBUG, + "hot threads while applying cluster state [" + currentAction + ']', + ReferenceDocs.LOGGING + ); + } + }); + } + return this::stop; } void stop() { recording = false; - long timeSpentMS = currentTimeSupplier.getAsLong() - this.startTimeMS; - recordings.add(new Tuple<>(currentAction, timeSpentMS)); + long elapsedMillis = threadPool.rawRelativeTimeInMillis() - this.startMillis; + recordings.add(new Tuple<>(currentAction, elapsedMillis)); + + if (currentListener != null) { + currentListener.onResponse(null); + currentListener = null; + } } List> getRecordings() { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 4230838a97592..c2b35adb738f6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -64,12 +64,20 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements Setting.Property.NodeScope ); + public static final Setting CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING = Setting.positiveTimeSetting( + "cluster.service.slow_task_thread_dump_timeout", + TimeValue.timeValueSeconds(30), + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final String CLUSTER_UPDATE_THREAD_NAME = "clusterApplierService#updateTask"; private final ClusterSettings clusterSettings; private final ThreadPool threadPool; private volatile TimeValue slowTaskLoggingThreshold; + private volatile TimeValue slowTaskThreadDumpTimeout; private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor; @@ -98,15 +106,8 @@ public ClusterApplierService(String nodeName, Settings settings, ClusterSettings this.nodeName = nodeName; this.recordingService = new ClusterApplierRecordingService(); - this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings); - this.clusterSettings.addSettingsUpdateConsumer( - CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, - this::setSlowTaskLoggingThreshold - ); - } - - private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { - this.slowTaskLoggingThreshold = slowTaskLoggingThreshold; + clusterSettings.initializeAndWatch(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, t -> slowTaskLoggingThreshold = t); + clusterSettings.initializeAndWatch(CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, t -> slowTaskThreadDumpTimeout = t); } public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) { @@ -391,7 +392,7 @@ private void runTask(String source, Function updateF final ClusterState previousClusterState = state.get(); final long startTimeMillis = threadPool.relativeTimeInMillis(); - final Recorder stopWatch = new Recorder(threadPool::rawRelativeTimeInMillis); + 
final Recorder stopWatch = new Recorder(threadPool, slowTaskThreadDumpTimeout); final ClusterState newClusterState; try { try (Releasable ignored = stopWatch.record("running task [" + source + ']')) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index ac5255f58622a..3493206e00bf6 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -340,6 +340,7 @@ public void apply(Settings value, Settings current, Settings previous) { IndexModule.NODE_STORE_ALLOW_MMAP, IndexSettings.NODE_DEFAULT_REFRESH_INTERVAL_SETTING, ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, ClusterService.USER_DEFINED_METADATA, MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, MasterService.MASTER_SERVICE_STARVATION_LOGGING_THRESHOLD_SETTING, diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java index e4bad62b6834a..be7ca6d2f0616 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierRecordingServiceTests.java @@ -8,11 +8,18 @@ package org.elasticsearch.cluster.service; +import org.apache.logging.log4j.Level; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Recorder; import org.elasticsearch.cluster.service.ClusterApplierRecordingService.Stats.Recording; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; import java.util.Map; @@ -20,22 +27,37 @@ public class ClusterApplierRecordingServiceTests extends ESTestCase { + private DeterministicTaskQueue deterministicTaskQueue; + private ThreadPool threadPool; + + @Before + public void createThreadPool() { + deterministicTaskQueue = new DeterministicTaskQueue(); + deterministicTaskQueue.scheduleAt(between(0, 1000000), () -> {}); + deterministicTaskQueue.runAllTasks(); + threadPool = deterministicTaskQueue.getThreadPool(); + } + + private void advanceTime(long millis) { + deterministicTaskQueue.scheduleAt(deterministicTaskQueue.getCurrentTimeMillis() + millis, () -> {}); + deterministicTaskQueue.runAllTasks(); + } + public void testRecorder() { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); { Releasable releasable = recorder.record("action1"); - currentTime[0] = 5; + advanceTime(5); releasable.close(); } { Releasable releasable = recorder.record("action2"); - currentTime[0] = 42; + advanceTime(37); releasable.close(); } { Releasable releasable = recorder.record("action3"); - currentTime[0] = 45; + advanceTime(3); releasable.close(); } @@ -44,8 +66,8 @@ public void testRecorder() { } public void testRecorderAlreadyRecording() { - var recorder = new Recorder(() -> 1L); 
- Releasable releasable = recorder.record("action1"); + var recorder = new Recorder(threadPool, TimeValue.ZERO); + Releasable ignored = recorder.record("action1"); expectThrows(IllegalStateException.class, () -> recorder.record("action2")); } @@ -53,16 +75,15 @@ public void testRecordingServiceStats() { var service = new ClusterApplierRecordingService(); { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 5; + advanceTime(5); } try (var r = recorder.record("action2")) { - currentTime[0] = 42; + advanceTime(37); } try (var r = recorder.record("action3")) { - currentTime[0] = 45; + advanceTime(3); } service.updateStats(recorder); var stats = service.getStats(); @@ -76,16 +97,15 @@ public void testRecordingServiceStats() { ); } { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 3; + advanceTime(3); } try (var r = recorder.record("action2")) { - currentTime[0] = 35; + advanceTime(32); } try (var r = recorder.record("action3")) { - currentTime[0] = 41; + advanceTime(6); } service.updateStats(recorder); var stats = service.getStats(); @@ -99,13 +119,12 @@ public void testRecordingServiceStats() { ); } { - long[] currentTime = new long[1]; - var recorder = new Recorder(() -> currentTime[0]); + var recorder = new Recorder(threadPool, TimeValue.ZERO); try (var r = recorder.record("action1")) { - currentTime[0] = 2; + advanceTime(2); } try (var r = recorder.record("action3")) { - currentTime[0] = 6; + advanceTime(4); } service.updateStats(recorder); var stats = service.getStats(); @@ -116,4 +135,44 @@ public void testRecordingServiceStats() { } } + @TestLogging(reason = "testing debug logging", value = "org.elasticsearch.cluster.service.ClusterApplierRecordingService:DEBUG") + public void testSlowTaskDebugLogging() { + final var debugLoggingTimeout = TimeValue.timeValueMillis(between(1, 100000)); + var recorder = new Recorder(threadPool, debugLoggingTimeout); + + // ensure hot threads is logged if the action is too slow + var slowAction = recorder.record("slow_action"); + deterministicTaskQueue.scheduleAt( + deterministicTaskQueue.getCurrentTimeMillis() + debugLoggingTimeout.millis() + between(1, 1000), + slowAction::close + ); + MockLogAppender.assertThatLogger( + deterministicTaskQueue::runAllTasksInTimeOrder, + ClusterApplierRecordingService.class, + new MockLogAppender.SeenEventExpectation( + "hot threads", + ClusterApplierRecordingService.class.getCanonicalName(), + Level.DEBUG, + "hot threads while applying cluster state [slow_action]" + ) + ); + + // ensure hot threads is _NOT_ logged if the action completes quickly enough + var fastAction = recorder.record("fast_action"); + deterministicTaskQueue.scheduleAt( + randomLongBetween(0, deterministicTaskQueue.getCurrentTimeMillis() + debugLoggingTimeout.millis() - 1), + fastAction::close + ); + MockLogAppender.assertThatLogger( + deterministicTaskQueue::runAllTasksInTimeOrder, + ClusterApplierRecordingService.class, + new MockLogAppender.UnseenEventExpectation( + "hot threads", + ClusterApplierRecordingService.class.getCanonicalName(), + Level.DEBUG, + "*" + ) + ); + } + } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java 
b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java index 35d99b97f99c9..08fda90f9fd73 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java @@ -68,7 +68,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index c24c1c1becb18..6cb467af525c9 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -165,7 +165,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java index e09df368ecbdc..96b4aea1a55b9 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java @@ -56,7 +56,8 @@ public void createComponents() { ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java index 1538d7a94fb81..dbc8ec3f99a97 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobModelSnapshotCRUDIT.java @@ -68,7 +68,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - 
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index b4ffe46e6ea92..ae128b507c795 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -128,7 +128,8 @@ public void createComponents() throws Exception { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java index b7bd8fed3e83c..4493a680d25cf 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java @@ -72,7 +72,8 @@ public void createComponents() { ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index 5c2c3abf232f5..442c0095b3001 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -121,7 +121,8 @@ public void setUpVariables() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index be768e1a01e20..98bcb5d7f0d8e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -82,7 +82,8 @@ public void init() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index 0698c266400b0..9adbb3b3dd89a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -98,7 +98,8 @@ public void setUpVariables() { MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index 0a7fc75115d2a..7b0d9d3051dcc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -436,7 +436,8 @@ private ResultsPersisterService buildResultsPersisterService(OriginSettingClient OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java index c3db184759d3f..0440a66bdbcaa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutorTests.java @@ -100,6 +100,7 @@ public void setUpMocks() { OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING, MachineLearning.CONCURRENT_JOB_ALLOCATIONS, MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearning.MAX_LAZY_ML_NODES, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index 7aaeabac3af8b..2acf2e3da3cf6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -408,7 +408,8 @@ public static ResultsPersisterService buildResultsPersisterService(OriginSetting OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING ) ) ); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java index 2013a8ff53301..b65f3587ffbf0 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java @@ -443,43 +443,47 @@ public void testValidateMinimumInterval() { public void testStoppedPriority() { ClockMock clock = new ClockMock(); ThreadPool threadPool = new TestThreadPool("name"); - ClusterSettings clusterSettings = new ClusterSettings( - Settings.EMPTY, - new HashSet<>( - Arrays.asList( - MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, - OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, - ClusterService.USER_DEFINED_METADATA, - ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING + try { + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + new HashSet<>( + Arrays.asList( + MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, + ClusterService.USER_DEFINED_METADATA, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, + ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING + ) ) - ) - ); - final SetOnce task = new SetOnce<>(); - ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { - @Override - public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { - logger.info("--> got task: [source: {}]: {}", source, updateTask); - if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { - task.set(operationModeUpdateTask); + ); + final SetOnce task = new SetOnce<>(); + ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { + @Override + public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { + logger.info("--> got task: [source: {}]: {}", source, updateTask); + if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { + task.set(operationModeUpdateTask); + } } - } - }; - - SnapshotLifecycleService service = new SnapshotLifecycleService( - Settings.EMPTY, - () -> new SnapshotLifecycleTask(null, null, null), - fakeService, - clock - ); - ClusterState state = createState( - new SnapshotLifecycleMetadata(Map.of(), OperationMode.STOPPING, new SnapshotLifecycleStats(0, 0, 0, 0, Map.of())), - true - ); - 
service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); - assertEquals(task.get().priority(), Priority.IMMEDIATE); - assertNull(task.get().getILMOperationMode()); - assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); - threadPool.shutdownNow(); + }; + + SnapshotLifecycleService service = new SnapshotLifecycleService( + Settings.EMPTY, + () -> new SnapshotLifecycleTask(null, null, null), + fakeService, + clock + ); + ClusterState state = createState( + new SnapshotLifecycleMetadata(Map.of(), OperationMode.STOPPING, new SnapshotLifecycleStats(0, 0, 0, 0, Map.of())), + true + ); + service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); + assertEquals(task.get().priority(), Priority.IMMEDIATE); + assertNull(task.get().getILMOperationMode()); + assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } } class FakeSnapshotTask extends SnapshotLifecycleTask { From 43d0ef94cb5f5104c3b5a1cea363217b140bbb81 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 4 Apr 2024 14:05:28 +0100 Subject: [PATCH 022/173] AwaitsFix for #107043 --- .../admin/cluster/node/tasks/TransportTasksActionTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 7168b2c1edcdd..9ddcf8a596226 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -563,6 +563,7 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { responseLatch.await(10, TimeUnit.SECONDS); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107043") public void testFailedTasksCount() throws Exception { Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build(); setupTestNodes(settings); From f30b79e9727ab88b0dbb9d2c013a0554ad73cc69 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 4 Apr 2024 15:19:18 +0200 Subject: [PATCH 023/173] [Connector API] Support updating single schedule type (full, incremental or access_control) (#107090) --- .../331_connector_update_scheduling.yml | 35 +++++++++++++--- .../connector/ConnectorScheduling.java | 42 +++++++++++++++---- .../UpdateConnectorSchedulingAction.java | 9 ++++ .../connector/ConnectorIndexServiceTests.java | 38 +++++++++++++++++ .../connector/ConnectorTestUtils.java | 2 +- 5 files changed, 111 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml index e8e3fa0e87068..dd74fa7e27c2f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml @@ -44,6 +44,34 @@ setup: - match: { scheduling.incremental.enabled: false } - match: { scheduling.incremental.interval: "3 0 0 * * ?" 
} + +--- +"Update Connector Scheduling - Update single schedule only": + - do: + connector.update_scheduling: + connector_id: test-connector + body: + scheduling: + incremental: + enabled: true + interval: 3 0 0 * * ? + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { scheduling.incremental.enabled: true } + - match: { scheduling.incremental.interval: "3 0 0 * * ?" } + + # Other schedules are unchanged (those are defaults when connector is created) + - match: { scheduling.full.enabled: false } + - match: { scheduling.full.interval: "0 0 0 * * ?" } + - match: { scheduling.access_control.enabled: false } + - match: { scheduling.access_control.interval: "0 0 0 * * ?" } + --- "Update Connector Scheduling - Connector doesn't exist": - do: @@ -63,16 +91,13 @@ setup: interval: 3 0 0 * * ? --- -"Update Connector Scheduling - Required fields are missing": +"Update Connector Scheduling - Schedules are missing": - do: catch: "bad_request" connector.update_scheduling: connector_id: test-connector body: - scheduling: - incremental: - enabled: false - interval: 3 0 0 * * ? + scheduling: {} --- "Update Connector Scheduling - Wrong CRON expression": diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java index 98b6bdf1f3250..3c08a5ac1e218 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java @@ -27,6 +27,7 @@ import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class ConnectorScheduling implements Writeable, ToXContentObject { @@ -45,9 +46,9 @@ public class ConnectorScheduling implements Writeable, ToXContentObject { * @param incremental connector incremental sync schedule represented as {@link ScheduleConfig} */ private ConnectorScheduling(ScheduleConfig accessControl, ScheduleConfig full, ScheduleConfig incremental) { - this.accessControl = Objects.requireNonNull(accessControl, ACCESS_CONTROL_FIELD.getPreferredName()); - this.full = Objects.requireNonNull(full, FULL_FIELD.getPreferredName()); - this.incremental = Objects.requireNonNull(incremental, INCREMENTAL_FIELD.getPreferredName()); + this.accessControl = accessControl; + this.full = full; + this.incremental = incremental; } public ConnectorScheduling(StreamInput in) throws IOException { @@ -56,6 +57,18 @@ public ConnectorScheduling(StreamInput in) throws IOException { this.incremental = new ScheduleConfig(in); } + public ScheduleConfig getAccessControl() { + return accessControl; + } + + public ScheduleConfig getFull() { + return full; + } + + public ScheduleConfig getIncremental() { + return incremental; + } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "connector_scheduling", true, @@ -67,13 +80,18 @@ public ConnectorScheduling(StreamInput in) throws IOException { static { PARSER.declareField( - constructorArg(), + optionalConstructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), ACCESS_CONTROL_FIELD, ObjectParser.ValueType.OBJECT ); - PARSER.declareField(constructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), FULL_FIELD, 
ObjectParser.ValueType.OBJECT);
-        PARSER.declareField(constructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), INCREMENTAL_FIELD, ObjectParser.ValueType.OBJECT);
+        PARSER.declareField(optionalConstructorArg(), (p, c) -> ScheduleConfig.fromXContent(p), FULL_FIELD, ObjectParser.ValueType.OBJECT);
+        PARSER.declareField(
+            optionalConstructorArg(),
+            (p, c) -> ScheduleConfig.fromXContent(p),
+            INCREMENTAL_FIELD,
+            ObjectParser.ValueType.OBJECT
+        );
     }
 
     public static ConnectorScheduling fromXContentBytes(BytesReference source, XContentType xContentType) {
@@ -92,9 +110,15 @@ public static ConnectorScheduling fromXContent(XContentParser parser) throws IOE
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         {
-            builder.field(ACCESS_CONTROL_FIELD.getPreferredName(), accessControl);
-            builder.field(FULL_FIELD.getPreferredName(), full);
-            builder.field(INCREMENTAL_FIELD.getPreferredName(), incremental);
+            if (accessControl != null) {
+                builder.field(ACCESS_CONTROL_FIELD.getPreferredName(), accessControl);
+            }
+            if (full != null) {
+                builder.field(FULL_FIELD.getPreferredName(), full);
+            }
+            if (incremental != null) {
+                builder.field(INCREMENTAL_FIELD.getPreferredName(), incremental);
+            }
         }
         builder.endObject();
         return builder;
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
index b7d3c0c2e5d10..578639f065a0b 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
@@ -73,6 +73,15 @@ public ActionRequestValidationException validate() {
             validationException = addValidationError("[scheduling] cannot be [null].", validationException);
         }
 
+        if (Objects.isNull(scheduling.getFull())
+            && Objects.isNull(scheduling.getIncremental())
+            && Objects.isNull(scheduling.getAccessControl())) {
+            validationException = addValidationError(
+                "[scheduling] object needs to define at least one schedule type: [full | incremental | access_control]",
+                validationException
+            );
+        }
+
         return validationException;
     }
 
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java
index 601c1597a39b0..00cc08a3b0bb7 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java
@@ -54,6 +54,7 @@ import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
+import static org.elasticsearch.xpack.application.connector.ConnectorTestUtils.getRandomCronExpression;
 import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.equalTo;
 
@@ -320,6 +321,43 @@ public void testUpdateConnectorScheduling() throws Exception {
         assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling()));
     }
 
+    public void testUpdateConnectorScheduling_OnlyFullSchedule() throws Exception {
+        Connector connector = ConnectorTestUtils.getRandomConnector();
+        
String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + // Update scheduling for full, incremental and access_control + ConnectorScheduling initialScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); + UpdateConnectorSchedulingAction.Request updateSchedulingRequest = new UpdateConnectorSchedulingAction.Request( + connectorId, + initialScheduling + ); + DocWriteResponse updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + // Update full scheduling only + ConnectorScheduling.ScheduleConfig fullSyncSchedule = new ConnectorScheduling.ScheduleConfig.Builder().setEnabled(randomBoolean()) + .setInterval(getRandomCronExpression()) + .build(); + + UpdateConnectorSchedulingAction.Request updateSchedulingRequestWithFullSchedule = new UpdateConnectorSchedulingAction.Request( + connectorId, + new ConnectorScheduling.Builder().setFull(fullSyncSchedule).build() + ); + + updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequestWithFullSchedule); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + // Assert that full schedule is updated + assertThat(fullSyncSchedule, equalTo(indexedConnector.getScheduling().getFull())); + // Assert that other schedules stay unchanged + assertThat(initialScheduling.getAccessControl(), equalTo(indexedConnector.getScheduling().getAccessControl())); + assertThat(initialScheduling.getIncremental(), equalTo(indexedConnector.getScheduling().getIncremental())); + } + public void testUpdateConnectorIndexName() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 0eade25eaa03f..48168c2f45827 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -325,7 +325,7 @@ private static ConnectorFeatures.SyncRulesFeatures randomSyncRulesFeatures() { /** * Second (0 - 59) Minute (0 - 59) Hour (0 - 23) Day of month (1 - 31) Month (1 - 12) */ - private static Cron getRandomCronExpression() { + public static Cron getRandomCronExpression() { return new Cron( String.format( Locale.ROOT, From 29a3256a7dc6419a40ba5b156eaa295af41be869 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 4 Apr 2024 15:42:03 +0200 Subject: [PATCH 024/173] [DOCS] Makes Inference APIs main page more informative. 
(#107100) --- .../inference/delete-inference.asciidoc | 14 +++++++------ .../inference/get-inference.asciidoc | 20 ++++++++++--------- .../inference/inference-apis.asciidoc | 14 ++++++++----- .../inference/post-inference.asciidoc | 17 +++++++++------- .../inference/put-inference.asciidoc | 6 +++--- 5 files changed, 41 insertions(+), 30 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 5b693f51d65da..72f752563491b 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Deletes an {infer} model deployment. - -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +Deletes an {infer} endpoint. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded though Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 1a11904a169ca..2cfc17a3b6203 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Retrieves {infer} model information. +Retrieves {infer} endpoint information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded though Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] @@ -37,10 +39,10 @@ own model, use the <>. You can get information in a single API request for: -* a single {infer} model by providing the task type and the model ID, -* all of the {infer} models for a certain task type by providing the task type -and a wildcard expression, -* all of the {infer} models by using a wildcard expression. +* a single {infer} endpoint by providing the task type and the {infer} ID, +* all of the {infer} endpoints for a certain task type by providing the task +type and a wildcard expression, +* all of the {infer} endpoints by using a wildcard expression. [discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index cdc6bfe254ea2..d700a396e08bf 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -4,12 +4,16 @@ experimental[] -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. 
-can use on an ML node with custom {ml} models. If you want to train and use your
-own model, use the <<ml-df-trained-models-apis>>.
+IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in
+{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or
+Hugging Face. For built-in models and models uploaded through Eland, the {infer}
+APIs offer an alternative way to use and manage trained models. However, if you
+do not plan to use the {infer} APIs to use these models or if you want to use
+non-NLP models, use the <<ml-df-trained-models-apis>>.
 
-You can use the following APIs to manage {infer} models and perform {infer}:
+The {infer} APIs enable you to create {infer} endpoints and use {ml} models of
+different providers - such as Cohere, OpenAI, or Hugging Face - as a service. Use
+the following APIs to manage {infer} models and perform {infer}:
 
 * <<delete-inference-api>>
 * <<get-inference-api>>
diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc
index 08a58d7789e33..5b78af905b095 100644
--- a/docs/reference/inference/post-inference.asciidoc
+++ b/docs/reference/inference/post-inference.asciidoc
@@ -4,12 +4,14 @@
 
 experimental[]
 
-Performs an inference task on an input text by using an {infer} model.
+Performs an inference task on an input text by using an {infer} endpoint.
 
-IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER,
-OpenAI, or Hugging Face, in your cluster. This is not the same feature that you
-can use on an ML node with custom {ml} models. If you want to train and use your
-own model, use the <<ml-df-trained-models-apis>>.
+IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in
+{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or
+Hugging Face. For built-in models and models uploaded through Eland, the {infer}
+APIs offer an alternative way to use and manage trained models. However, if you
+do not plan to use the {infer} APIs to use these models or if you want to use
+non-NLP models, use the <<ml-df-trained-models-apis>>.
 
 
 [discrete]
@@ -34,8 +36,9 @@ own model, use the <<ml-df-trained-models-apis>>.
 
 The perform {infer} API enables you to use {ml} models to perform specific tasks
 on data that you provide as an input. The API returns a response with the
-results of the tasks. The {infer} model you use can perform one specific task
-that has been defined when the model was created with the <<put-inference-api>>.
+results of the tasks. The {infer} endpoint you use can perform one specific task
+that has been defined when the endpoint was created with the
+<<put-inference-api>>.
 
 
 [discrete]
diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc
index 110ec9d6fa98c..87a865b9487e5 100644
--- a/docs/reference/inference/put-inference.asciidoc
+++ b/docs/reference/inference/put-inference.asciidoc
@@ -4,7 +4,7 @@
 
 experimental[]
 
-Creates a model to perform an {infer} task.
+Creates an {infer} endpoint to perform an {infer} task.
 
 IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in
 {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or
 Hugging Face.
@@ -33,8 +33,8 @@ or if you want to use non-NLP models, use the <<ml-df-trained-models-apis>>.
 [[put-inference-api-desc]]
 ==== {api-description-title}
 
-The create {infer} API enables you to create and configure a {ml} model to
-perform a specific {infer} task.
+The create {infer} API enables you to create an {infer} endpoint and configure a
+{ml} model to perform a specific {infer} task.
The following services are available through the {infer} API: From 87a995377efafd56d28ec6acf1dcefd2ca1e8ec5 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 07:11:47 -0700 Subject: [PATCH 025/173] Unmute testUniDirectionalIndexFollowing (#106886) --- .../java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java index db30f7cb98b02..68ebb43d607a1 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java @@ -28,7 +28,6 @@ public class CcrRollingUpgradeIT extends AbstractMultiClusterUpgradeTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102000") public void testUniDirectionalIndexFollowing() throws Exception { logger.info("clusterName={}, upgradeState={}", clusterName, upgradeState); From da28e76cccca610b7ed5c77ceb75892f5dda659b Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Thu, 4 Apr 2024 17:20:14 +0300 Subject: [PATCH 026/173] Default `data_streams.auto_sharding.excludes` to NO exclusions (`[]`) (#107091) --- .../datastreams/DataStreamAutoshardingIT.java | 7 +------ .../autosharding/DataStreamAutoShardingService.java | 2 +- .../DataStreamAutoShardingServiceTests.java | 10 ++-------- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java index 9f2e6feb91659..f7743ebac9caf 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java @@ -84,7 +84,6 @@ protected Collection> nodePlugins() { public void configureClusterSettings() { updateClusterSettings( Settings.builder() - .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of()) // we want to manually trigger the rollovers in this test suite to be able to assert incrementally the changes in shard // configurations .put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "30d") @@ -93,11 +92,7 @@ public void configureClusterSettings() { @After public void resetClusterSetting() { - updateClusterSettings( - Settings.builder() - .putNull(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey()) - .putNull(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL) - ); + updateClusterSettings(Settings.builder().putNull(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL)); } public void testRolloverOnAutoShardCondition() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index a26be73cc169d..da1c85834d2c8 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ 
b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java
@@ -46,7 +46,7 @@ public class DataStreamAutoShardingService {
 
     public static final Setting<List<String>> DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING = Setting.listSetting(
         "data_streams.auto_sharding.excludes",
-        List.of("*"),
+        List.of(),
         Function.identity(),
         Setting.Property.Dynamic,
         Setting.Property.NodeScope
diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java
index 41a5d0b70ea10..0d1104279d3ce 100644
--- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java
@@ -77,10 +77,7 @@ public void setupService() {
         clusterService = createClusterService(threadPool, clusterSettings);
         now = System.currentTimeMillis();
         service = new DataStreamAutoShardingService(
-            Settings.builder()
-                .put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true)
-                .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of())
-                .build(),
+            Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(),
             clusterService,
             new FeatureService(List.of(new FeatureSpecification() {
                 @Override
@@ -147,10 +144,7 @@ public Set<NodeFeature> getFeatures() {
         ClusterState stateNoFeature = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder()).build();
 
         DataStreamAutoShardingService noFeatureService = new DataStreamAutoShardingService(
-            Settings.builder()
-                .put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true)
-                .putList(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_EXCLUDES_SETTING.getKey(), List.of())
-                .build(),
+            Settings.builder().put(DataStreamAutoShardingService.DATA_STREAMS_AUTO_SHARDING_ENABLED, true).build(),
             clusterService,
             new FeatureService(List.of()),
             () -> now

From abfb0ae7b3bb7ca2fcc426e7e1482ef8d042a8f9 Mon Sep 17 00:00:00 2001
From: Alexander Spies
Date: Thu, 4 Apr 2024 16:24:33 +0200
Subject: [PATCH 027/173] ESQL: Fix treating all fields as MV in COUNT
 pushdown (#106720)

Fix a mistake in #106690 that accidentally prevented COUNT(field) from being
pushed down in case field is single-valued.

Add test to avoid future regressions.
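To make the fix concrete, a query of the shape used by the new test (a
sketch; `salary` stands for any mapped, single-valued field) is eligible
for the pushdown again:

    from test
    | stats c = count(salary)

With the inverted ternary in SearchStats#isSingleValue corrected, such a
count is rewritten into a Lucene `exists` query (an EsStatsQueryExec leaf)
instead of extracting the field values per document.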
--- docs/changelog/106720.yaml | 5 ++ .../xpack/esql/qa/rest/RestEsqlTestCase.java | 9 +-- .../xpack/esql/EsqlTestUtils.java | 11 +++ .../xpack/esql/stats/SearchStats.java | 3 +- .../LocalPhysicalPlanOptimizerTests.java | 74 ++++++++++++++++++- 5 files changed, 93 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/106720.yaml diff --git a/docs/changelog/106720.yaml b/docs/changelog/106720.yaml new file mode 100644 index 0000000000000..93358ed1d3dff --- /dev/null +++ b/docs/changelog/106720.yaml @@ -0,0 +1,5 @@ +pr: 106720 +summary: "ESQL: Fix treating all fields as MV in COUNT pushdown" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 301b30df5647a..b67432f491cf3 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -30,6 +30,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.junit.After; import org.junit.Before; @@ -79,12 +80,8 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final String MAPPING_ALL_TYPES; static { - try (InputStream mappingPropertiesStream = RestEsqlTestCase.class.getResourceAsStream("/mapping-all-types.json")) { - String properties = new String(mappingPropertiesStream.readAllBytes(), StandardCharsets.UTF_8); - MAPPING_ALL_TYPES = "{\"mappings\": " + properties + "}"; - } catch (IOException ex) { - throw new RuntimeException(ex); - } + String properties = EsqlTestUtils.loadUtf8TextFile("/mapping-all-types.json"); + MAPPING_ALL_TYPES = "{\"mappings\": " + properties + "}"; } private static final String DOCUMENT_TEMPLATE = """ diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 8c5c79b98767e..fc8f80a19f09f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -34,6 +34,9 @@ import org.elasticsearch.xpack.ql.util.StringUtils; import org.junit.Assert; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -146,6 +149,14 @@ public static Map loadMapping(String name) { return TypesTests.loadMapping(EsqlDataTypeRegistry.INSTANCE, name, true); } + public static String loadUtf8TextFile(String name) { + try (InputStream textStream = EsqlTestUtils.class.getResourceAsStream(name)) { + return new String(textStream.readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + public static EnrichResolution emptyPolicyResolution() { return new EnrichResolution(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index e8c547c55a373..57458c0574776 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java
@@ -198,8 +198,9 @@ public boolean isSingleValue(String field) {
             // fields are MV per default
             var sv = new boolean[] { false };
             for (SearchExecutionContext context : contexts) {
-                MappedFieldType mappedType = context.isFieldMapped(field) ? null : context.getFieldType(field);
+                MappedFieldType mappedType = context.isFieldMapped(field) ? context.getFieldType(field) : null;
                 if (mappedType != null) {
+                    sv[0] = true;
                     doWithContexts(r -> {
                         sv[0] &= detectSingleValue(r, mappedType, field);
                         return sv[0];
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
index cf387245a5968..80deb0ea83d86 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
@@ -9,12 +9,16 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
+import org.apache.lucene.search.IndexSearcher;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.MapperServiceTestCase;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
 import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats;
@@ -53,8 +57,10 @@
 import org.elasticsearch.xpack.ql.tree.Source;
 import org.elasticsearch.xpack.ql.type.DataTypes;
 import org.elasticsearch.xpack.ql.type.EsField;
+import org.elasticsearch.xpack.ql.util.Holder;
 import org.junit.Before;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
@@ -78,7 +84,7 @@
 import static org.hamcrest.Matchers.nullValue;
 
 //@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug")
-public class LocalPhysicalPlanOptimizerTests extends ESTestCase {
+public class LocalPhysicalPlanOptimizerTests extends MapperServiceTestCase {
 
     private static final String PARAM_FORMATTING = "%1$s";
 
@@ -270,6 +276,70 @@ public void testCountOneFieldWithFilterAndLimit() {
         assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
     }
 
+    public void testCountPushdownForSvAndMvFields() throws IOException {
+        String properties = EsqlTestUtils.loadUtf8TextFile("/mapping-basic.json");
+        String mapping = "{\"mappings\": " + properties + "}";
+
+        String query = """
+            from test
+            | stats c = count(salary)
+            """;
+
+        PhysicalPlan plan;
+
+        List<List<String>> docsCasesWithoutPushdown = List.of(
+            // No pushdown yet in case of MVs
+            List.of("{ \"salary\" : [1,2] }"),
+            List.of("{ \"salary\" : [1,2] }", "{ \"salary\" : null}")
+        );
+        for (List<String> docs : docsCasesWithoutPushdown) {
+            plan = planWithMappingAndDocs(query, mapping, docs);
+            // No EsStatsQueryExec as leaf of the plan.
+            assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
+        }
+
+        // Cases where we can push this down as a COUNT(*) since there are only SVs
+        List<List<String>> docsCasesWithPushdown = List.of(List.of(), List.of("{ \"salary\" : 1 }"), List.of("{ \"salary\": null }"));
+        for (List<String> docs : docsCasesWithPushdown) {
+            plan = planWithMappingAndDocs(query, mapping, docs);
+
+            Holder<EsStatsQueryExec> leaf = new Holder<>();
+            plan.forEachDown(p -> {
+                if (p instanceof EsStatsQueryExec s) {
+                    leaf.set(s);
+                }
+            });
+
+            String expectedStats = """
+                [Stat[name=salary, type=COUNT, query={
+                  "exists" : {
+                    "field" : "salary",
+                    "boost" : 1.0
+                  }
+                }]]""";
+            assertNotNull(leaf.get());
+            assertThat(leaf.get().stats().toString(), equalTo(expectedStats));
+        }
+    }
+
+    private PhysicalPlan planWithMappingAndDocs(String query, String mapping, List<String> docs) throws IOException {
+        MapperService mapperService = createMapperService(mapping);
+        List<ParsedDocument> parsedDocs = docs.stream().map(d -> mapperService.documentMapper().parse(source(d))).toList();
+
+        Holder<PhysicalPlan> plan = new Holder<>(null);
+        withLuceneIndex(mapperService, indexWriter -> {
+            for (ParsedDocument parsedDoc : parsedDocs) {
+                indexWriter.addDocument(parsedDoc.rootDoc());
+            }
+        }, directoryReader -> {
+            IndexSearcher searcher = newSearcher(directoryReader);
+            SearchExecutionContext ctx = createSearchExecutionContext(mapperService, searcher);
+            plan.set(plan(query, new SearchStats(List.of(ctx))));
+        });
+
+        return plan.get();
+    }
+
     // optimized doesn't know yet how to break down different multi count
     public void testCountMultipleFieldsWithFilter() {
         var plan = plan("""

From edc9e6787a88991214c835b3995f941dc56ba28d Mon Sep 17 00:00:00 2001
From: Alexander Spies
Date: Thu, 4 Apr 2024 16:28:07 +0200
Subject: [PATCH 028/173] ESQL: Fix fully pruned aggregates (#106673)

Fix a bug where PruneColumns would sometimes completely replace an
Aggregate, producing the wrong number of rows as a result.
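For example (one of the new csv-spec cases), a query that drops every
aggregate output must still return exactly one row:

    FROM employees
    | STATS c = COUNT(salary)
    | EVAL x = 3.14
    | DROP c

Before this fix, PruneColumns could substitute the Aggregate with its child,
so the EVAL produced one row per input document instead of one row for the
single group.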
--- docs/changelog/106673.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 91 ++++++++++++++++++- .../esql/optimizer/LogicalPlanOptimizer.java | 18 +++- .../optimizer/LogicalPlanOptimizerTests.java | 72 +++++++++++++++ 4 files changed, 183 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/106673.yaml diff --git a/docs/changelog/106673.yaml b/docs/changelog/106673.yaml new file mode 100644 index 0000000000000..9a716d20ad2bc --- /dev/null +++ b/docs/changelog/106673.yaml @@ -0,0 +1,6 @@ +pr: 106673 +summary: "ESQL: Fix fully pruned aggregates" +area: ES|QL +type: bug +issues: + - 106427 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 91c79e64b2385..0a18568cf3c84 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1218,7 +1218,7 @@ c:l 891 ; -countMV#[skip:-8.13.99,reason:supported in 8.14] +countMV#[skip:-8.13.99,reason:fixed in 8.14] FROM employees | STATS vals = COUNT(salary_change.int) ; @@ -1227,6 +1227,95 @@ vals:l 183 ; +emptyProjectInStatWithEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) +| EVAL x = 3.14 +| DROP c +; + +x:d +3.14 +; + +emptyProjectInStatWithCountGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) BY gender +| EVAL x = 3.14 +| DROP c, gender +; + +x:d +3.14 +3.14 +3.14 +; + + +emptyProjectInStatWithMinGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS m = MIN(salary) BY gender +| EVAL x = 3.14 +| DROP m, gender +; + +x:d +3.14 +3.14 +3.14 +; + +emptyProjectInStatOnlyGroupAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS BY gender +| EVAL x = 3.14 +| DROP gender +; + +x:d +3.14 +3.14 +3.14 +; + +emptyProjectInStatWithTwoGroupsAndEval#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS c = COUNT(salary) BY gender, still_hired +| EVAL x = 3.14 +| DROP c, gender, still_hired +; + +x:d +3.14 +3.14 +3.14 +3.14 +3.14 +3.14 +; + +emptyProjectInStatDueToAnotherStat#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS s = SUM(salary), m = MIN(salary) +| EVAL x = 3.14 +| STATS rows = COUNT(*) +; + +rows:l +1 +; + +emptyProjectInStatDueToAnotherStatWithGroups#[skip:-8.13.99,reason:fixed in 8.14] +FROM employees +| STATS m = MEDIAN(salary) BY gender, still_hired +| EVAL x = 3.14 +| STATS rows = COUNT(*) +; + +rows:l +6 +; + sumOfConst#[skip:-8.13.99,reason:supported in 8.14] FROM employees | STATS s1 = sum(1), s2point1 = sum(2.1), s_mv = sum([-1, 0, 3]) * 3, s_null = sum(null), rows = count(*) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index ec3ff07a9867f..fe2a3076380df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -1037,11 +1037,23 @@ public LogicalPlan apply(LogicalPlan plan) { recheck = false; if (p instanceof Aggregate aggregate) { var remaining = seenProjection.get() ? 
removeUnused(aggregate.aggregates(), used) : null; - // no aggregates, no need + if (remaining != null) { if (remaining.isEmpty()) { - recheck = true; - p = aggregate.child(); + // We still need to have a plan that produces 1 row per group. + if (aggregate.groupings().isEmpty()) { + p = new LocalRelation( + aggregate.source(), + List.of(new EmptyAttribute(aggregate.source())), + LocalSupplier.of( + new Block[] { BlockUtils.constantBlock(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, null, 1) } + ) + ); + } else { + // Aggs cannot produce pages with 0 columns, so retain one grouping. + remaining = List.of(Expressions.attribute(aggregate.groupings().get(0))); + p = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), remaining); + } } else { p = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), remaining); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 3f0b39603ef89..050ee2caefec0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -238,6 +238,78 @@ public void testEmptyProjectionInStat() { assertThat(relation.supplier().get(), emptyArray()); } + /** + * Expects + * + * EsqlProject[[x{r}#6]] + * \_Eval[[1[INTEGER] AS x]] + * \_Limit[1000[INTEGER]] + * \_LocalRelation[[{e}#18],[ConstantNullBlock[positions=1]]] + */ + public void testEmptyProjectInStatWithEval() { + var plan = plan(""" + from test + | where languages > 1 + | stats c = count(salary) + | eval x = 1, c2 = c*2 + | drop c, c2 + """); + + var project = as(plan, Project.class); + var eval = as(project.child(), Eval.class); + var limit = as(eval.child(), Limit.class); + var singleRowRelation = as(limit.child(), LocalRelation.class); + var singleRow = singleRowRelation.supplier().get(); + assertThat(singleRow.length, equalTo(1)); + assertThat(singleRow[0].getPositionCount(), equalTo(1)); + + var exprs = eval.fields(); + assertThat(exprs.size(), equalTo(1)); + var alias = as(exprs.get(0), Alias.class); + assertThat(alias.name(), equalTo("x")); + assertThat(alias.child().fold(), equalTo(1)); + } + + /** + * Expects + * + * EsqlProject[[x{r}#8]] + * \_Eval[[1[INTEGER] AS x]] + * \_Limit[1000[INTEGER]] + * \_Aggregate[[emp_no{f}#15],[emp_no{f}#15]] + * \_Filter[languages{f}#18 > 1[INTEGER]] + * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+     */
+    public void testEmptyProjectInStatWithGroupAndEval() {
+        var plan = plan("""
+            from test
+            | where languages > 1
+            | stats c = count(salary) by emp_no
+            | eval x = 1, c2 = c*2
+            | drop c, emp_no, c2
+            """);
+
+        var project = as(plan, Project.class);
+        var eval = as(project.child(), Eval.class);
+        var limit = as(eval.child(), Limit.class);
+        var agg = as(limit.child(), Aggregate.class);
+        var filter = as(agg.child(), Filter.class);
+        var relation = as(filter.child(), EsRelation.class);
+
+        assertThat(Expressions.names(agg.groupings()), contains("emp_no"));
+        assertThat(Expressions.names(agg.aggregates()), contains("emp_no"));
+
+        var exprs = eval.fields();
+        assertThat(exprs.size(), equalTo(1));
+        var alias = as(exprs.get(0), Alias.class);
+        assertThat(alias.name(), equalTo("x"));
+        assertThat(alias.child().fold(), equalTo(1));
+
+        var filterCondition = as(filter.condition(), GreaterThan.class);
+        assertThat(Expressions.name(filterCondition.left()), equalTo("languages"));
+        assertThat(filterCondition.right().fold(), equalTo(1));
+    }
+
     public void testCombineProjections() {
         var plan = plan("""
             from test

From 8a1df9be2d7cef6d82f45372956d264753ccaa17 Mon Sep 17 00:00:00 2001
From: shainaraskas <58563081+shainaraskas@users.noreply.github.com>
Date: Thu, 4 Apr 2024 10:44:14 -0400
Subject: [PATCH 029/173] [DOCS] fix time zone logic example (#106962)

* [DOCS] fix time zone logic example

* specify standard time

* goodbye e.g.
---
 .../bucket/datehistogram-aggregation.asciidoc | 27 ++++++++++---------
 1 file changed, 14 insertions(+), 13 deletions(-)

diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index 26774c7091d27..3511ec9e63b02 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -341,23 +341,24 @@ Response:
 rounding is also done in UTC. Use the `time_zone` parameter to indicate
 that bucketing should use a different time zone.
 
-For example, if the interval is a calendar day and the time zone is
-`America/New_York` then `2020-01-03T01:00:01Z` is :
-# Converted to `2020-01-02T18:00:01`
-# Rounded down to `2020-01-02T00:00:00`
-# Then converted back to UTC to produce `2020-01-02T05:00:00:00Z`
-# Finally, when the bucket is turned into a string key it is printed in
-  `America/New_York` so it'll display as `"2020-01-02T00:00:00"`.
-
-It looks like:
+When you specify a time zone, the following logic is used to determine the bucket the document belongs in:
 
 [source,java]
 ----
 bucket_key = localToUtc(Math.floor(utcToLocal(value) / interval) * interval)
 ----
 
+For example, if the interval is a calendar day and the time zone is
+`America/New_York`, then the date value `2020-01-03T01:00:01Z` is processed as follows:
+
+. Converted to EST: `2020-01-02T20:00:01`
+. Rounded down to the nearest interval: `2020-01-02T00:00:00`
+. Converted back to UTC: `2020-01-02T05:00:00Z`
+
+When a `key_as_string` is generated for the bucket, the key value is stored in `America/New_York` time, so it'll display as `"2020-01-02T00:00:00"`.
+
+You can specify time zones as an ISO 8601 UTC offset, such as `+01:00` or
+`-08:00`, or as an IANA time zone ID, such as `America/Los_Angeles`.
Consider the following example: @@ -618,7 +619,7 @@ For example, for `+50d` we see: -------------------------------------------------- // TESTRESPONSE[skip:no setup made for this example yet] -It is therefor always important when using `offset` with `calendar_interval` bucket sizes +It is therefore always important when using `offset` with `calendar_interval` bucket sizes to understand the consequences of using offsets larger than the interval size. More examples: @@ -633,7 +634,7 @@ but as soon as you push the start date into the second month by having an offset quarters will all start on different dates. [[date-histogram-keyed-response]] -==== Keyed Response +==== Keyed response Setting the `keyed` flag to `true` associates a unique string key with each bucket and returns the ranges as a hash rather than an array: From 46ec6362b5a87a85f5a2461a42b3383043e9ad07 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 4 Apr 2024 16:45:06 +0200 Subject: [PATCH 030/173] [Profiling] Add TopN Functions API (#106860) With this commit we add a new API to the Universal Profiling plugin that allows to gather a list of functions with the most observed samples (TopN functions). --------- Co-authored-by: Joseph Crail --- docs/changelog/106860.yaml | 5 + .../api/profiling.topn_functions.json | 28 ++ .../profiling/GetFlameGraphActionIT.java | 16 +- .../profiling/GetStackTracesActionIT.java | 91 +++++- .../profiling/GetTopNFunctionsActionIT.java | 81 +++++ .../data/profiling-events-all.ndjson | 4 +- .../profiling/GetStackTracesRequest.java | 34 +- .../GetStackTracesResponseBuilder.java | 3 + .../profiling/GetTopNFunctionsAction.java | 18 ++ .../profiling/GetTopNFunctionsResponse.java | 56 ++++ .../xpack/profiling/ProfilingPlugin.java | 2 + .../profiling/RestGetTopNFunctionsAction.java | 46 +++ .../xpack/profiling/StackFrame.java | 1 + .../xpack/profiling/StackTrace.java | 6 +- .../xpack/profiling/TopNFunction.java | 297 ++++++++++++++++++ .../xpack/profiling/TraceEvent.java | 9 +- .../TransportGetFlamegraphAction.java | 4 +- .../TransportGetStackTracesAction.java | 72 ++++- .../TransportGetTopNFunctionsAction.java | 162 ++++++++++ .../profiling/GetStackTracesRequestTests.java | 21 +- .../xpack/profiling/ResamplerTests.java | 5 + .../xpack/profiling/StackFrameTests.java | 1 - .../xpack/profiling/TopNFunctionTests.java | 117 +++++++ .../TransportGetFlamegraphActionTests.java | 1 - .../TransportGetTopNFunctionsActionTests.java | 183 +++++++++++ .../xpack/security/operator/Constants.java | 1 + .../rest-api-spec/test/profiling/10_basic.yml | 64 ++++ 27 files changed, 1299 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/106860.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json create mode 100644 x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java create mode 100644 x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java create mode 100644 
x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java create mode 100644 x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java diff --git a/docs/changelog/106860.yaml b/docs/changelog/106860.yaml new file mode 100644 index 0000000000000..376f8753023b9 --- /dev/null +++ b/docs/changelog/106860.yaml @@ -0,0 +1,5 @@ +pr: 106860 +summary: "[Profiling] Add TopN Functions API" +area: Application +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json new file mode 100644 index 0000000000000..3b4db3abf2cca --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/profiling.topn_functions.json @@ -0,0 +1,28 @@ +{ + "profiling.topn_functions":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/observability/current/universal-profiling.html", + "description":"Extracts a list of topN functions from Universal Profiling." + }, + "stability":"stable", + "visibility":"private", + "headers":{ + "accept": ["application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_profiling/topn/functions", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The filter conditions for stacktraces", + "required":true + } + } +} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java index 855c0c200aaba..20519d53459ba 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java @@ -9,7 +9,21 @@ public class GetFlameGraphActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { - GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); GetFlamegraphResponse response = client().execute(GetFlamegraphAction.INSTANCE, request).get(); // only spot-check top level properties - detailed tests are done in unit tests assertEquals(994, response.getSize()); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 62b8242e7df86..30de2173e8903 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -15,7 +15,65 @@ public class GetStackTracesActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { - GetStackTracesRequest request = new GetStackTracesRequest(1000, 600.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, 
+ null, + null, + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); + assertEquals(46, response.getTotalSamples()); + assertEquals(1821, response.getTotalFrames()); + + assertNotNull(response.getStackTraceEvents()); + assertEquals(3L, response.getStackTraceEvents().get("L7kj7UvlKbT-vN73el4faQ").count); + + assertNotNull(response.getStackTraces()); + // just do a high-level spot check. Decoding is tested in unit-tests + StackTrace stackTrace = response.getStackTraces().get("L7kj7UvlKbT-vN73el4faQ"); + assertEquals(18, stackTrace.addressOrLines.length); + assertEquals(18, stackTrace.fileIds.length); + assertEquals(18, stackTrace.frameIds.length); + assertEquals(18, stackTrace.typeIds.length); + assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); + assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + // not determined by default + assertNull(stackTrace.subGroups); + + assertNotNull(response.getStackFrames()); + StackFrame stackFrame = response.getStackFrames().get("8NlMClggx8jaziUTJXlmWAAAAAAAAIYI"); + assertEquals(List.of("start_thread"), stackFrame.functionName); + + assertNotNull(response.getExecutables()); + assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); + } + + public void testGetStackTracesGroupedByServiceName() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + "service.name", + null, + null, + null, + null, + null + ); request.setAdjustSampleCount(true); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); assertEquals(46, response.getTotalSamples()); @@ -33,6 +91,7 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals(18, stackTrace.typeIds.length); assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + assertEquals(Long.valueOf(2L), stackTrace.subGroups.get("basket")); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("8NlMClggx8jaziUTJXlmWAAAAAAAAIYI"); @@ -42,6 +101,28 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); } + public void testGetStackTracesGroupedByInvalidField() { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + // only service.name is supported (note the trailing "s") + "service.names", + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, client().execute(GetStackTracesAction.INSTANCE, request)); + assertEquals("Requested custom event aggregation field [service.names] but only [service.name] is supported.", e.getMessage()); + } + public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception { BoolQueryBuilder query = QueryBuilders.boolQuery(); query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); @@ -56,6 +137,7 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception // also match an index that does not contain stacktrace ids to ensure it is ignored new String[] { "apm-test-*", "apm-legacy-test-*" }, "transaction.profiler_stack_trace_ids", + 
"transaction.name", null, null, null, @@ -79,6 +161,7 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); + assertEquals(Long.valueOf(3L), stackTrace.subGroups.get("encodeSha1")); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); @@ -103,6 +186,7 @@ public void testGetStackTracesFromAPMWithMatchAndDownsampling() throws Exception null, null, null, + null, null ); // ensures consistent results in the random sampler aggregation that is used internally @@ -126,6 +210,8 @@ public void testGetStackTracesFromAPMWithMatchAndDownsampling() throws Exception assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); + // not determined by default + assertNull(stackTrace.subGroups); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); @@ -150,6 +236,7 @@ public void testGetStackTracesFromAPMNoMatch() throws Exception { null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); @@ -171,6 +258,7 @@ public void testGetStackTracesFromAPMIndexNotAvailable() throws Exception { null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); @@ -192,6 +280,7 @@ public void testGetStackTracesFromAPMStackTraceFieldNotAvailable() throws Except null, null, null, + null, null ); GetStackTracesResponse response = client().execute(GetStackTracesAction.INSTANCE, request).get(); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java new file mode 100644 index 0000000000000..05d0e1cb0471b --- /dev/null +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +public class GetTopNFunctionsActionIT extends ProfilingTestCase { + public void testGetTopNFunctionsUnfiltered() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(747, response.getTopN().size()); + } + + public void testGetTopNFunctionsGroupedByServiceName() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest( + 1000, + 600.0d, + 1.0d, + 1.0d, + null, + null, + null, + "service.name", + null, + null, + null, + null, + null + ); + request.setAdjustSampleCount(true); + request.setLimit(50); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(50, response.getTopN().size()); + } + + public void testGetTopNFunctionsFromAPM() throws Exception { + BoolQueryBuilder query = QueryBuilders.boolQuery(); + query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); + query.must().add(QueryBuilders.rangeQuery("@timestamp").lte("1698624000")); + + GetStackTracesRequest request = new GetStackTracesRequest( + null, + 1.0d, + 1.0d, + 1.0d, + query, + // also match an index that does not contain stacktrace ids to ensure it is ignored + new String[] { "apm-test-*", "apm-legacy-test-*" }, + "transaction.profiler_stack_trace_ids", + "transaction.name", + null, + null, + null, + null, + null + ); + GetTopNFunctionsResponse response = client().execute(GetTopNFunctionsAction.INSTANCE, request).get(); + assertEquals(45, response.getTopN().size()); + } +} diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson index 502494f05c62c..7211ad54cbcd1 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-events-all.ndjson @@ -71,9 +71,9 @@ {"create": {"_index": "profiling-events-all"}} {"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["XF9MchOwpePfa6_hYy-vZQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} {"create": {"_index": "profiling-events-all"}} -{"Stacktrace.count": [2], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} +{"Stacktrace.count": [2], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": 
["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"], "service.name": "basket"} {"create": {"_index": "profiling-events-all"}} -{"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"]} +{"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["L7kj7UvlKbT-vN73el4faQ"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["497295213074376"], "service.name": "basket"} {"create": {"_index": "profiling-events-all"}} {"Stacktrace.count": [1], "profiling.project.id": ["100"], "os.kernel": ["9.9.9-0"], "tags": ["environment:qa", "region:eu-west-1"], "host.ip": ["192.168.1.2"], "@timestamp": ["1698624000"], "container.name": ["instance-0000000010"], "ecs.version": ["1.12.0"], "Stacktrace.id": ["hRqQI2CBPiapzgFG9jrmDA"], "agent.version": ["head-be593ef3-1688111067"], "host.name": ["ip-192-168-1-2"], "host.id": ["8457605156473051743"], "process.thread.name": ["599103450330106"]} {"create": {"_index": "profiling-events-all"}} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index 86ed038467191..038a576cd77fc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -38,8 +38,10 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesRequest.Replaceable { public static final ParseField QUERY_FIELD = new ParseField("query"); public static final ParseField SAMPLE_SIZE_FIELD = new ParseField("sample_size"); + public static final ParseField LIMIT_FIELD = new ParseField("limit"); public static final ParseField INDICES_FIELD = new ParseField("indices"); public static final ParseField STACKTRACE_IDS_FIELD = new ParseField("stacktrace_ids_field"); + public static final ParseField AGGREGATION_FIELD = new ParseField("aggregation_field"); public static final ParseField REQUESTED_DURATION_FIELD = new ParseField("requested_duration"); public static final ParseField AWS_COST_FACTOR_FIELD = new ParseField("aws_cost_factor"); public static final ParseField AZURE_COST_FACTOR_FIELD = new ParseField("azure_cost_factor"); @@ -52,9 +54,11 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesReque private QueryBuilder query; private int sampleSize; + private Integer limit; private String[] indices; private boolean userProvidedIndices; private String stackTraceIdsField; + private String 
aggregationField; private Double requestedDuration; private Double awsCostFactor; private Double azureCostFactor; @@ -73,7 +77,7 @@ public class GetStackTracesRequest extends ActionRequest implements IndicesReque private Integer shardSeed; public GetStackTracesRequest() { - this(null, null, null, null, null, null, null, null, null, null, null, null); + this(null, null, null, null, null, null, null, null, null, null, null, null, null); } public GetStackTracesRequest( @@ -84,6 +88,7 @@ public GetStackTracesRequest( QueryBuilder query, String[] indices, String stackTraceIdsField, + String aggregationField, Double customCO2PerKWH, Double customDatacenterPUE, Double customPerCoreWattX86, @@ -98,6 +103,7 @@ public GetStackTracesRequest( this.indices = indices; this.userProvidedIndices = indices != null && indices.length > 0; this.stackTraceIdsField = stackTraceIdsField; + this.aggregationField = aggregationField; this.customCO2PerKWH = customCO2PerKWH; this.customDatacenterPUE = customDatacenterPUE; this.customPerCoreWattX86 = customPerCoreWattX86; @@ -114,6 +120,14 @@ public int getSampleSize() { return sampleSize; } + public void setLimit(int limit) { + this.limit = limit; + } + + public Integer getLimit() { + return limit; + } + public Double getRequestedDuration() { return requestedDuration; } @@ -162,6 +176,10 @@ public String getStackTraceIdsField() { return stackTraceIdsField; } + public String getAggregationField() { + return aggregationField; + } + public boolean isAdjustSampleCount() { return Boolean.TRUE.equals(adjustSampleCount); } @@ -194,8 +212,12 @@ public void parseXContent(XContentParser parser) throws IOException { } else if (token.isValue()) { if (SAMPLE_SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.sampleSize = parser.intValue(); + } else if (LIMIT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.limit = parser.intValue(); } else if (STACKTRACE_IDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.stackTraceIdsField = parser.text(); + } else if (AGGREGATION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.aggregationField = parser.text(); } else if (REQUESTED_DURATION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { this.requestedDuration = parser.doubleValue(); } else if (AWS_COST_FACTOR_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -277,7 +299,15 @@ public ActionRequestValidationException validate() { ); } } + if (aggregationField != null && aggregationField.isBlank()) { + validationException = addValidationError( + "[" + AGGREGATION_FIELD.getPreferredName() + "] must be non-empty", + validationException + ); + } + validationException = requirePositive(SAMPLE_SIZE_FIELD, sampleSize, validationException); + validationException = requirePositive(LIMIT_FIELD, limit, validationException); validationException = requirePositive(REQUESTED_DURATION_FIELD, requestedDuration, validationException); validationException = requirePositive(AWS_COST_FACTOR_FIELD, awsCostFactor, validationException); validationException = requirePositive(AZURE_COST_FACTOR_FIELD, azureCostFactor, validationException); @@ -307,7 +337,9 @@ public String getDescription() { StringBuilder sb = new StringBuilder(); appendField(sb, "indices", indices); appendField(sb, "stacktrace_ids_field", stackTraceIdsField); + appendField(sb, "aggregation_field", aggregationField); appendField(sb, "sample_size", sampleSize); + appendField(sb, "limit", limit); appendField(sb, "requested_duration", 
requestedDuration); appendField(sb, "aws_cost_factor", awsCostFactor); appendField(sb, "azure_cost_factor", azureCostFactor); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java index f058341b80b37..44c9c987fc6c7 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java @@ -155,6 +155,9 @@ public GetStackTracesResponse build() { if (event != null) { StackTrace stackTrace = entry.getValue(); stackTrace.count = event.count; + if (event.subGroups.isEmpty() == false) { + stackTrace.subGroups = event.subGroups; + } stackTrace.annualCO2Tons = event.annualCO2Tons; stackTrace.annualCostsUSD = event.annualCostsUSD; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java new file mode 100644 index 0000000000000..b11e74cbbf93d --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.action.ActionType; + +public final class GetTopNFunctionsAction extends ActionType { + public static final GetTopNFunctionsAction INSTANCE = new GetTopNFunctionsAction(); + public static final String NAME = "indices:data/read/profiling/topn/functions"; + + private GetTopNFunctionsAction() { + super(NAME); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java new file mode 100644 index 0000000000000..b8785bc607b18 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class GetTopNFunctionsResponse extends ActionResponse implements ToXContentObject { + private final long selfCount; + private final long totalCount; + private final List topNFunctions; + + public GetTopNFunctionsResponse(long selfCount, long totalCount, List topNFunctions) { + this.selfCount = selfCount; + this.totalCount = totalCount; + this.topNFunctions = topNFunctions; + } + + @Override + public void writeTo(StreamOutput out) { + TransportAction.localOnly(); + } + + public long getSelfCount() { + return selfCount; + } + + public long getTotalCount() { + return totalCount; + } + + public List getTopN() { + return topNFunctions; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("self_count", selfCount); + builder.field("total_count", totalCount); + builder.xContentList("topn", topNFunctions); + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index 400ddfdbf73b6..0615bef7a4980 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -134,6 +134,7 @@ public List getRestHandlers( if (enabled) { handlers.add(new RestGetStackTracesAction()); handlers.add(new RestGetFlamegraphAction()); + handlers.add(new RestGetTopNFunctionsAction()); } return Collections.unmodifiableList(handlers); } @@ -168,6 +169,7 @@ public static ExecutorBuilder responseExecutorBuilder() { return List.of( new ActionHandler<>(GetStackTracesAction.INSTANCE, TransportGetStackTracesAction.class), new ActionHandler<>(GetFlamegraphAction.INSTANCE, TransportGetFlamegraphAction.class), + new ActionHandler<>(GetTopNFunctionsAction.INSTANCE, TransportGetTopNFunctionsAction.class), new ActionHandler<>(GetStatusAction.INSTANCE, TransportGetStatusAction.class), new ActionHandler<>(XPackUsageFeatureAction.UNIVERSAL_PROFILING, ProfilingUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.UNIVERSAL_PROFILING, ProfilingInfoTransportAction.class) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java new file mode 100644 index 0000000000000..b9896418d7b79 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+package org.elasticsearch.xpack.profiling;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.Scope;
+import org.elasticsearch.rest.ServerlessScope;
+import org.elasticsearch.rest.action.RestCancellableNodeClient;
+import org.elasticsearch.rest.action.RestToXContentListener;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+
+@ServerlessScope(Scope.PUBLIC)
+public class RestGetTopNFunctionsAction extends BaseRestHandler {
+    @Override
+    public List<Route> routes() {
+        return List.of(new Route(POST, "/_profiling/topn/functions"));
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+        GetStackTracesRequest getStackTracesRequest = new GetStackTracesRequest();
+        request.applyContentParser(getStackTracesRequest::parseXContent);
+        // enforce server-side adjustment of sample counts for top N functions
+        getStackTracesRequest.setAdjustSampleCount(true);
+
+        return channel -> {
+            RestCancellableNodeClient cancelClient = new RestCancellableNodeClient(client, request.getHttpChannel());
+            cancelClient.execute(GetTopNFunctionsAction.INSTANCE, getStackTracesRequest, new RestToXContentListener<>(channel));
+        };
+    }
+
+    @Override
+    public String getName() {
+        return "get_topn_functions_action";
+    }
+}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java
index 35f5899536745..5f7102c63d3d7 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java
@@ -18,6 +18,7 @@
 import java.util.function.Consumer;
 
 final class StackFrame implements ToXContentObject {
+    static final StackFrame EMPTY_STACKFRAME = new StackFrame("", "", 0, 0);
     List<String> fileName;
     List<String> functionName;
     List<Integer> functionOffset;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java
index b039bf22110b1..d24127824dafd 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java
@@ -26,7 +26,7 @@ final class StackTrace implements ToXContentObject {
     String[] fileIds;
     String[] frameIds;
     int[] typeIds;
-
+    Map<String, Long> subGroups;
     double annualCO2Tons;
     double annualCostsUSD;
     long count;
@@ -247,10 +247,10 @@ public boolean equals(Object o) {
             && Arrays.equals(fileIds, that.fileIds)
             && Arrays.equals(frameIds, that.frameIds)
             && Arrays.equals(typeIds, that.typeIds);
-        // Don't compare metadata like annualized co2, annualized costs and count.
+        // Don't compare metadata like annualized co2, annualized costs, subGroups and count.
     }
 
-    // Don't hash metadata like annualized co2, annualized costs and count.
+    // Don't hash metadata like annualized co2, annualized costs, subGroups and count.
@Override public int hashCode() { int result = Arrays.hashCode(addressOrLines); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java new file mode 100644 index 0000000000000..777d8e247335c --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +final class TopNFunction implements Cloneable, ToXContentObject, Comparable { + private final String id; + private int rank; + private final int frameType; + private final boolean inline; + private final int addressOrLine; + private final String functionName; + private final String sourceFilename; + private final int sourceLine; + private final String exeFilename; + private long selfCount; + private long totalCount; + private double selfAnnualCO2Tons; + private double totalAnnualCO2Tons; + private double selfAnnualCostsUSD; + private double totalAnnualCostsUSD; + private final Map subGroups; + + TopNFunction( + String id, + int frameType, + boolean inline, + int addressOrLine, + String functionName, + String sourceFilename, + int sourceLine, + String exeFilename + ) { + this( + id, + 0, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 0, + 0, + 0.0d, + 0.0d, + 0.0d, + 0.0d, + new HashMap<>() + ); + } + + TopNFunction( + String id, + int rank, + int frameType, + boolean inline, + int addressOrLine, + String functionName, + String sourceFilename, + int sourceLine, + String exeFilename, + long selfCount, + long totalCount, + double selfAnnualCO2Tons, + double totalAnnualCO2Tons, + double selfAnnualCostsUSD, + double totalAnnualCostsUSD, + Map subGroups + ) { + this.id = id; + this.rank = rank; + this.frameType = frameType; + this.inline = inline; + this.addressOrLine = addressOrLine; + this.functionName = functionName; + this.sourceFilename = sourceFilename; + this.sourceLine = sourceLine; + this.exeFilename = exeFilename; + this.selfCount = selfCount; + this.totalCount = totalCount; + this.selfAnnualCO2Tons = selfAnnualCO2Tons; + this.totalAnnualCO2Tons = totalAnnualCO2Tons; + this.selfAnnualCostsUSD = selfAnnualCostsUSD; + this.totalAnnualCostsUSD = totalAnnualCostsUSD; + this.subGroups = subGroups; + } + + public String getId() { + return this.id; + } + + public void setRank(int rank) { + this.rank = rank; + } + + public long getSelfCount() { + return selfCount; + } + + public void addSelfCount(long selfCount) { + this.selfCount += selfCount; + } + + public long getTotalCount() { + return totalCount; + } + + public void addTotalCount(long totalCount) { + this.totalCount += totalCount; + } + + public void addSelfAnnualCO2Tons(double co2Tons) { + this.selfAnnualCO2Tons += co2Tons; + } + + public void addTotalAnnualCO2Tons(double co2Tons) { + this.totalAnnualCO2Tons += co2Tons; + } + + public void addSelfAnnualCostsUSD(double costs) { + this.selfAnnualCostsUSD += costs; + } 
+ + public void addTotalAnnualCostsUSD(double costs) { + this.totalAnnualCostsUSD += costs; + } + + public void addSubGroups(Map subGroups) { + for (Map.Entry subGroup : subGroups.entrySet()) { + long count = this.subGroups.getOrDefault(subGroup.getKey(), 0L); + this.subGroups.put(subGroup.getKey(), count + subGroup.getValue()); + } + } + + @Override + protected TopNFunction clone() { + return new TopNFunction( + id, + rank, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + selfCount, + totalCount, + selfAnnualCO2Tons, + totalAnnualCO2Tons, + selfAnnualCostsUSD, + totalAnnualCostsUSD, + new HashMap<>(subGroups) + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("id", this.id); + builder.field("rank", this.rank); + builder.startObject("frame"); + builder.field("frame_type", this.frameType); + builder.field("inline", this.inline); + builder.field("address_or_line", this.addressOrLine); + builder.field("function_name", this.functionName); + builder.field("file_name", this.sourceFilename); + builder.field("line_number", this.sourceLine); + builder.field("executable_file_name", this.exeFilename); + builder.endObject(); + builder.field("sub_groups", subGroups); + builder.field("self_count", this.selfCount); + builder.field("total_count", this.totalCount); + builder.field("self_annual_co2_tons").rawValue(NumberUtils.doubleToString(selfAnnualCO2Tons)); + builder.field("total_annual_co2_tons").rawValue(NumberUtils.doubleToString(totalAnnualCO2Tons)); + builder.field("self_annual_costs_usd").rawValue(NumberUtils.doubleToString(selfAnnualCostsUSD)); + builder.field("total_annual_costs_usd").rawValue(NumberUtils.doubleToString(totalAnnualCostsUSD)); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TopNFunction that = (TopNFunction) o; + return Objects.equals(id, that.id) + && Objects.equals(rank, that.rank) + && Objects.equals(frameType, that.frameType) + && Objects.equals(inline, that.inline) + && Objects.equals(addressOrLine, that.addressOrLine) + && Objects.equals(functionName, that.functionName) + && Objects.equals(sourceFilename, that.sourceFilename) + && Objects.equals(sourceLine, that.sourceLine) + && Objects.equals(exeFilename, that.exeFilename) + && Objects.equals(selfCount, that.selfCount) + && Objects.equals(totalCount, that.totalCount) + && Objects.equals(selfAnnualCO2Tons, that.selfAnnualCO2Tons) + && Objects.equals(totalAnnualCO2Tons, that.totalAnnualCO2Tons) + && Objects.equals(selfAnnualCostsUSD, that.selfAnnualCostsUSD) + && Objects.equals(totalAnnualCostsUSD, that.totalAnnualCostsUSD) + && Objects.equals(subGroups, that.subGroups); + } + + @Override + public int hashCode() { + return Objects.hash( + id, + rank, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + selfCount, + totalCount, + selfAnnualCO2Tons, + totalAnnualCO2Tons, + selfAnnualCostsUSD, + totalAnnualCostsUSD, + subGroups + ); + } + + @Override + public String toString() { + return "TopNFunction{" + + "id='" + + id + + '\'' + + ", rank=" + + rank + + ", frameType=" + + frameType + + ", inline=" + + inline + + ", addressOrLine=" + + addressOrLine + + ", functionName='" + + functionName + + '\'' + + ", sourceFilename='" + + sourceFilename + + '\'' + + ", 
sourceLine=" + + sourceLine + + ", exeFilename='" + + exeFilename + + '\'' + + ", selfCount=" + + selfCount + + ", totalCount=" + + totalCount + + ", selfAnnualCO2Tons=" + + selfAnnualCO2Tons + + ", totalAnnualCO2Tons=" + + totalAnnualCO2Tons + + ", selfAnnualCostsUSD=" + + selfAnnualCostsUSD + + ", totalAnnualCostsUSD=" + + totalAnnualCostsUSD + + ", subGroups=" + + subGroups + + '}'; + } + + @Override + public int compareTo(TopNFunction that) { + if (this.selfCount > that.selfCount) { + return 1; + } + if (this.selfCount < that.selfCount) { + return -1; + } + return this.id.compareTo(that.id); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java index d092868e23cd9..adb88848a418e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.profiling; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; final class TraceEvent { @@ -14,9 +16,10 @@ final class TraceEvent { double annualCO2Tons; double annualCostsUSD; long count; + final Map subGroups = new HashMap<>(); TraceEvent(String stacktraceID) { - this.stacktraceID = stacktraceID; + this(stacktraceID, 0); } TraceEvent(String stacktraceID, long count) { @@ -53,6 +56,8 @@ public String toString() { + annualCostsUSD + ", count=" + count - + "}"; + + ", subGroups=" + + subGroups + + '}'; } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index 39b73db41aeef..7a25319d3a1cc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -28,8 +28,6 @@ public class TransportGetFlamegraphAction extends TransportAction { private static final Logger log = LogManager.getLogger(TransportGetFlamegraphAction.class); - private static final StackFrame EMPTY_STACKFRAME = new StackFrame("", "", 0, 0); - private final NodeClient nodeClient; private final TransportService transportService; @@ -97,7 +95,7 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { String fileId = stackTrace.fileIds[i]; int frameType = stackTrace.typeIds[i]; int addressOrLine = stackTrace.addressOrLines[i]; - StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, EMPTY_STACKFRAME); + StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, StackFrame.EMPTY_STACKFRAME); String executable = response.getExecutables().getOrDefault(fileId, ""); final boolean isLeafFrame = i == frameCount - 1; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 8fff0dab53b08..0acdc7c37ce09 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -108,6 +108,12 @@ public class 
TransportGetStackTracesAction extends TransportAction { @@ -370,6 +408,14 @@ The same stacktraces may come from different hosts (eventually from different da stackTraceEvents.put(stackTraceID, event); } event.count += finalCount; + if (request.getAggregationField() != null) { + Terms subGroup = stacktraceBucket.getAggregations().get(CUSTOM_EVENT_SUB_AGGREGATION_NAME); + for (Terms.Bucket b : subGroup.getBuckets()) { + String subGroupName = b.getKeyAsString(); + long subGroupCount = event.subGroups.getOrDefault(subGroupName, 0L); + event.subGroups.put(subGroupName, subGroupCount + b.getDocCount()); + } + } } } responseBuilder.setTotalSamples(totalFinalCount); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java new file mode 100644 index 0000000000000..cb5f2da6c3731 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class TransportGetTopNFunctionsAction extends TransportAction { + private static final Logger log = LogManager.getLogger(TransportGetTopNFunctionsAction.class); + private final NodeClient nodeClient; + private final TransportService transportService; + + @Inject + public TransportGetTopNFunctionsAction(NodeClient nodeClient, TransportService transportService, ActionFilters actionFilters) { + super(GetTopNFunctionsAction.NAME, actionFilters, transportService.getTaskManager()); + this.nodeClient = nodeClient; + this.transportService = transportService; + } + + @Override + protected void doExecute(Task task, GetStackTracesRequest request, ActionListener listener) { + Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), task); + StopWatch watch = new StopWatch("getTopNFunctionsAction"); + client.execute(GetStackTracesAction.INSTANCE, request, ActionListener.wrap(searchResponse -> { + StopWatch processingWatch = new StopWatch("Processing response"); + GetTopNFunctionsResponse topNFunctionsResponse = buildTopNFunctions(searchResponse, request.getLimit()); + log.debug(() -> watch.report() + " " + processingWatch.report()); + listener.onResponse(topNFunctionsResponse); + }, listener::onFailure)); + } + + static GetTopNFunctionsResponse buildTopNFunctions(GetStackTracesResponse response, Integer limit) { + TopNFunctionsBuilder builder = new 
TopNFunctionsBuilder(limit); + if (response.getTotalFrames() == 0) { + return builder.build(); + } + + for (StackTrace stackTrace : response.getStackTraces().values()) { + Set frameGroupsPerStackTrace = new HashSet<>(); + long samples = stackTrace.count; + double annualCO2Tons = stackTrace.annualCO2Tons; + double annualCostsUSD = stackTrace.annualCostsUSD; + + int frameCount = stackTrace.frameIds.length; + for (int i = 0; i < frameCount; i++) { + String frameId = stackTrace.frameIds[i]; + String fileId = stackTrace.fileIds[i]; + int frameType = stackTrace.typeIds[i]; + int addressOrLine = stackTrace.addressOrLines[i]; + StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, StackFrame.EMPTY_STACKFRAME); + String executable = response.getExecutables().getOrDefault(fileId, ""); + + final boolean isLeafFrame = i == frameCount - 1; + stackFrame.forEach(frame -> { + // The samples associated with a frame provide the total number of + // traces in which that frame has appeared at least once. However, a + // frame may appear multiple times in a trace, and thus to avoid + // counting it multiple times we need to record the frames seen so + // far in each trace. Instead of using the entire frame information + // to determine if a frame has already been seen within a given + // stacktrace, we use the frame group ID for a frame. + String frameGroupId = FrameGroupID.create(fileId, addressOrLine, executable, frame.fileName(), frame.functionName()); + if (builder.isExists(frameGroupId) == false) { + builder.addTopNFunction( + new TopNFunction( + frameGroupId, + frameType, + frame.inline(), + addressOrLine, + frame.functionName(), + frame.fileName(), + frame.lineNumber(), + executable + ) + ); + } + TopNFunction current = builder.getTopNFunction(frameGroupId); + if (stackTrace.subGroups != null) { + current.addSubGroups(stackTrace.subGroups); + } + if (frameGroupsPerStackTrace.contains(frameGroupId) == false) { + frameGroupsPerStackTrace.add(frameGroupId); + current.addTotalCount(samples); + current.addTotalAnnualCO2Tons(annualCO2Tons); + current.addTotalAnnualCostsUSD(annualCostsUSD); + + } + if (isLeafFrame && frame.last()) { + // Leaf frame: sum up counts for self CPU. 
+ current.addSelfCount(samples); + current.addSelfAnnualCO2Tons(annualCO2Tons); + current.addSelfAnnualCostsUSD(annualCostsUSD); + + } + }); + } + } + + return builder.build(); + } + + private static class TopNFunctionsBuilder { + private final Integer limit; + private final HashMap topNFunctions; + + TopNFunctionsBuilder(Integer limit) { + this.limit = limit; + this.topNFunctions = new HashMap<>(); + } + + public GetTopNFunctionsResponse build() { + List functions = new ArrayList<>(topNFunctions.values()); + functions.sort(Collections.reverseOrder()); + long sumSelfCount = 0; + long sumTotalCount = 0; + for (int i = 0; i < functions.size(); i++) { + TopNFunction topNFunction = functions.get(i); + topNFunction.setRank(i + 1); + sumSelfCount += topNFunction.getSelfCount(); + sumTotalCount += topNFunction.getTotalCount(); + } + // limit at the end so global stats are independent of the limit + if (limit != null && limit > 0) { + functions = functions.subList(0, limit); + } + return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); + } + + public boolean isExists(String frameGroupID) { + return this.topNFunctions.containsKey(frameGroupID); + } + + public TopNFunction getTopNFunction(String frameGroupID) { + return this.topNFunctions.get(frameGroupID); + } + + public void addTopNFunction(TopNFunction topNFunction) { + this.topNFunctions.put(topNFunction.getId(), topNFunction); + } + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index a6fd6f39d88a2..cfaa90b8adf85 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -254,6 +254,7 @@ public void testValidateWrongSampleSize() { null, null, null, + null, null ); List validationErrors = request.validate().validationErrors(); @@ -274,6 +275,7 @@ public void testValidateSampleSizeIsValidWithCustomIndices() { null, null, null, + null, null ); assertNull("Expecting no validation errors", request.validate()); @@ -292,6 +294,7 @@ public void testValidateStacktraceWithoutIndices() { null, null, null, + null, null ); List validationErrors = request.validate().validationErrors(); @@ -312,6 +315,7 @@ public void testValidateIndicesWithoutStacktraces() { null, null, null, + null, null ); List validationErrors = request.validate().validationErrors(); @@ -333,6 +337,7 @@ public void testConsidersCustomIndicesInRelatedIndices() { null, null, null, + null, null ); String[] indices = request.indices(); @@ -341,7 +346,21 @@ public void testConsidersCustomIndicesInRelatedIndices() { } public void testConsidersDefaultIndicesInRelatedIndices() { - GetStackTracesRequest request = new GetStackTracesRequest(1, 1.0d, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = new GetStackTracesRequest( + 1, + 1.0d, + 1.0d, + 1.0d, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); String[] indices = request.indices(); assertEquals(15, indices.length); } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java index 75b59985b35b3..0b37dcd154ca5 100644 --- 
a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java @@ -42,6 +42,7 @@ public void testNoResamplingNoSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(false); @@ -70,6 +71,7 @@ public void testNoResamplingButAdjustSampleRate() { null, null, null, + null, null ); request.setAdjustSampleCount(true); @@ -98,6 +100,7 @@ public void testResamplingNoSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(false); @@ -129,6 +132,7 @@ public void testResamplingNoSampleRateAdjustmentWithQuery() { null, null, null, + null, null ); @@ -157,6 +161,7 @@ public void testResamplingAndSampleRateAdjustment() { null, null, null, + null, null ); request.setAdjustSampleCount(true); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java index b9d5c549c2fbc..3e1bc4eba202d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java @@ -62,6 +62,5 @@ public void testEquality() { frame, (o -> new StackFrame(o.fileName, o.functionName, o.functionOffset, o.lineNumber)) ); - } } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java new file mode 100644 index 0000000000000..afbbe24979466 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class TopNFunctionTests extends ESTestCase { + public void testToXContent() throws IOException { + String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; + int frameType = 1; + boolean inline = false; + int addressOrLine = 23; + String functionName = "PyDict_GetItemWithError"; + String sourceFilename = "/build/python3.9-RNBry6/python3.9-3.9.2/Objects/dictobject.c"; + int sourceLine = 1456; + String exeFilename = "python3.9"; + + String frameGroupID = FrameGroupID.create(fileID, addressOrLine, exeFilename, sourceFilename, functionName); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + .startObject() + .field("id", frameGroupID) + .field("rank", 1) + .startObject("frame") + .field("frame_type", frameType) + .field("inline", inline) + .field("address_or_line", addressOrLine) + .field("function_name", functionName) + .field("file_name", sourceFilename) + .field("line_number", sourceLine) + .field("executable_file_name", exeFilename) + .endObject() + .field("sub_groups", Map.of("basket", 7L)) + .field("self_count", 1) + .field("total_count", 10) + .field("self_annual_co2_tons") + .rawValue("2.2000") + .field("total_annual_co2_tons") + .rawValue("22.0000") + .field("self_annual_costs_usd", "12.0000") + .field("total_annual_costs_usd", "120.0000") + .endObject(); + + XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + TopNFunction topNFunction = new TopNFunction( + frameGroupID, + 1, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 1, + 10, + 2.2d, + 22.0d, + 12.0d, + 120.0d, + Map.of("basket", 7L) + ); + topNFunction.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); + } + + public void testEquality() { + String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; + int frameType = 1; + boolean inline = false; + int addressOrLine = 23; + String functionName = "PyDict_GetItemWithError"; + String sourceFilename = "/build/python3.9-RNBry6/python3.9-3.9.2/Objects/dictobject.c"; + int sourceLine = 1456; + String exeFilename = "python3.9"; + + String frameGroupID = FrameGroupID.create(fileID, addressOrLine, exeFilename, sourceFilename, functionName); + + TopNFunction topNFunction = new TopNFunction( + frameGroupID, + 1, + frameType, + inline, + addressOrLine, + functionName, + sourceFilename, + sourceLine, + exeFilename, + 1, + 10, + 2.0d, + 4.0d, + 23.2d, + 12.0d, + Map.of("checkout", 4L, "basket", 12L) + ); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(topNFunction, (TopNFunction::clone)); + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java index fd20ed04978f2..e10892f0e73ce 
100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java @@ -116,7 +116,6 @@ public void testCreateFlamegraph() { assertEquals(1L, response.getSelfCPU()); assertEquals(10L, response.getTotalCPU()); assertEquals(1L, response.getTotalSamples()); - } public void testCreateEmptyFlamegraphWithRootNode() { diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java new file mode 100644 index 0000000000000..f248d8e27bd43 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class TransportGetTopNFunctionsActionTests extends ESTestCase { + public void testCreateAllTopNFunctions() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse( + Map.of( + "2buqP1GpF-TXYmL4USW8gA", + new StackTrace( + new int[] { 12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645 }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w" }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-wAAAAAAwxLg", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwOl", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwvh", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHs1T", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHCj0", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHBtN", + "fr28zxcZ2UDasxYuu6dV-wAAAAAAxjUZ", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA0Gra", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" }, + new int[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }, + 0.3d, + 2.7d, + 1 + ) + ), + Map.of(), + Map.of("fr28zxcZ2UDasxYuu6dV-w", "containerd"), + Map.of("2buqP1GpF-TXYmL4USW8gA", new TraceEvent("2buqP1GpF-TXYmL4USW8gA", 1L)), + 9, + 1.0d, + 1 + ); + + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, null); + assertNotNull(response); + assertEquals(1, response.getSelfCount()); + assertEquals(9, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(9, topNFunctions.size()); + + assertEquals( + List.of( + topN("178196121", 1, 16339645, 1L, 1L, 0.3d, 0.3d, 2.7d, 2.7d), + topN("181192637", 2, 19336161, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("181190529", 3, 19334053, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180652335", 4, 18795859, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180479184", 5, 18622708, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("180475689", 6, 18619213, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("175515318", 7, 13658842, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("174846197", 8, 12989721, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("174640828", 9, 12784352, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d) + 
), + topNFunctions + ); + } + + public void testCreateTopNFunctionsWithLimit() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse( + Map.of( + "2buqP1GpF-TXYmL4USW8gA", + new StackTrace( + new int[] { 12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645 }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w", + "fr28zxcZ2UDasxYuu6dV-w" }, + new String[] { + "fr28zxcZ2UDasxYuu6dV-wAAAAAAwxLg", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwOl", + "fr28zxcZ2UDasxYuu6dV-wAAAAABJwvh", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHs1T", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHCj0", + "fr28zxcZ2UDasxYuu6dV-wAAAAABHBtN", + "fr28zxcZ2UDasxYuu6dV-wAAAAAAxjUZ", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA0Gra", + "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" }, + new int[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }, + 0.3d, + 2.7d, + 1 + ) + ), + Map.of(), + Map.of("fr28zxcZ2UDasxYuu6dV-w", "containerd"), + Map.of("2buqP1GpF-TXYmL4USW8gA", new TraceEvent("2buqP1GpF-TXYmL4USW8gA", 1L)), + 9, + 1.0d, + 1 + ); + + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, 3); + assertNotNull(response); + assertEquals(1, response.getSelfCount()); + assertEquals(9, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(3, topNFunctions.size()); + + assertEquals( + List.of( + topN("178196121", 1, 16339645, 1L, 1L, 0.3d, 0.3d, 2.7d, 2.7d), + topN("181192637", 2, 19336161, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d), + topN("181190529", 3, 19334053, 0L, 1L, 0.0d, 0.3d, 0.0d, 2.7d) + ), + topNFunctions + ); + } + + private TopNFunction topN( + String id, + int rank, + int addressOrLine, + long exclusiveCount, + long inclusiveCount, + double annualCO2TonsExclusive, + double annualCO2TonsInclusive, + double annualCostsUSDExclusive, + double annualCostsUSDInclusive + ) { + return new TopNFunction( + id, + rank, + 3, + false, + addressOrLine, + "", + "", + 0, + "containerd", + exclusiveCount, + inclusiveCount, + annualCO2TonsExclusive, + annualCO2TonsInclusive, + annualCostsUSDExclusive, + annualCostsUSDInclusive, + Collections.emptyMap() + ); + } + + public void testCreateEmptyTopNFunctions() { + GetStackTracesResponse stacktraces = new GetStackTracesResponse(Map.of(), Map.of(), Map.of(), Map.of(), 0, 1.0d, 0); + GetTopNFunctionsResponse response = TransportGetTopNFunctionsAction.buildTopNFunctions(stacktraces, null); + assertNotNull(response); + assertEquals(0, response.getSelfCount()); + assertEquals(0, response.getTotalCount()); + + List topNFunctions = response.getTopN(); + assertNotNull(topNFunctions); + assertEquals(0, topNFunctions.size()); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1009499d91b41..0f292d64bc4a6 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -549,6 +549,7 @@ public class Constants { "indices:data/read/open_point_in_time", 
"indices:data/read/profiling/stack_traces", "indices:data/read/profiling/flamegraph", + "indices:data/read/profiling/topn/functions", "indices:data/read/rank_eval", "indices:data/read/scroll", "indices:data/read/scroll/clear", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index cc282d26ae418..ffd5358a12d0a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -217,3 +217,67 @@ teardown: } } - match: { Size: 47} + +--- +"Test topN functions from profiling-events": + - skip: + version: "- 8.13.99" + reason: "the topN functions API was added in 8.14.0" + + - do: + profiling.topn_functions: + body: > + { + "sample_size": 20000, + "requested_duration": 86400, + "limit": 10, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2023-11-20", + "lt": "2023-11-21", + "format": "yyyy-MM-dd" + } + } + } + ] + } + } + } + - length: { topn: 10} + +--- +"Test topN functions from test-events": + - skip: + version: "- 8.13.99" + reason: "the topN functions API was added in 8.14.0" + + - do: + profiling.topn_functions: + body: > + { + "sample_size": 20000, + "indices": ["test-event*"], + "stacktrace_ids_field": "events", + "requested_duration": 86400, + "limit": 10, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2023-11-20", + "lt": "2023-11-21", + "format": "yyyy-MM-dd" + } + } + } + ] + } + } + } + - length: { topn: 10} From 08ad143dcd04d72b6fbe2977cabfe5be562b0cd9 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Thu, 4 Apr 2024 09:46:33 -0500 Subject: [PATCH 031/173] Support swapping the user context to the secondary auth user for named actions (#106613) This commit adds the ability to define specific actions that are required to be executed as the secondary authenticated user. When actions are defined as secondary auth actions, then the secondary authentication headers (and subsequent authentication) are required to call those actions and the authorization is based on the secondary user. The SPI hook to define which actions are required is intended for internal only use only. 
--- .../ml/MlUpgradeModeActionFilterTests.java | 4 +- .../qa/secondary-auth-actions/build.gradle | 29 +++++ .../auth/actions/SecondaryAuthActionsIT.java | 118 ++++++++++++++++++ .../src/main/java/module-info.java | 10 ++ .../actions/SecondaryAuthActionsPlugin.java | 19 +++ ...ecurity.authc.support.SecondaryAuthActions | 1 + .../security/src/main/java/module-info.java | 1 + .../xpack/security/Security.java | 6 +- .../action/filter/SecurityActionFilter.java | 19 ++- .../authc/support/SecondaryAuthActions.java | 22 ++++ .../filter/SecurityActionFilterTests.java | 80 +++++++++++- 11 files changed, 305 insertions(+), 4 deletions(-) create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/build.gradle create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java create mode 100644 x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java index 7ecf98cd7a6dd..3092808dc91f8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlUpgradeModeActionFilterTests.java @@ -31,6 +31,8 @@ import org.junit.After; import org.junit.Before; +import java.util.Set; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -114,7 +116,7 @@ public void testApply_ActionDisallowedInUpgradeModeWithResetModeExemption() { public void testOrder_UpgradeFilterIsExecutedAfterSecurityFilter() { MlUpgradeModeActionFilter upgradeModeFilter = new MlUpgradeModeActionFilter(clusterService); - SecurityActionFilter securityFilter = new SecurityActionFilter(null, null, null, null, mock(ThreadPool.class), null, null); + SecurityActionFilter securityFilter = new SecurityActionFilter(null, null, null, null, mock(ThreadPool.class), null, null, Set::of); ActionFilter[] actionFiltersInOrderOfExecution = new ActionFilters(Sets.newHashSet(upgradeModeFilter, securityFilter)).filters(); assertThat(actionFiltersInOrderOfExecution, is(arrayContaining(securityFilter, upgradeModeFilter))); diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle new file mode 100644 index 0000000000000..f805dc74c4ca0 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/build.gradle @@ -0,0 +1,29 @@ +import org.elasticsearch.gradle.util.GradleUtils + +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.base-internal-es-plugin' + +esplugin { + name 'secondary-auth-actions-extension' + description 'Spi extension plugin for security to enforce custom secondary auth actions' + classname 'org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin' + 
extendedPlugins = ['x-pack-security'] +} + +dependencies { + compileOnly project(':x-pack:plugin:core') + compileOnly project(':x-pack:plugin:security') + javaRestTestImplementation project(':test:framework') +} + +GradleUtils.extendSourceSet(project, 'main', 'javaRestTest') + +dependencies { + clusterPlugins project(':x-pack:plugin:security:qa:secondary-auth-actions') +} + +tasks.named("javadoc").configure { enabled = false } + +tasks.named('javaRestTest') { + usesDefaultDistribution() +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java b/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java new file mode 100644 index 0000000000000..768353dcb14f4 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/javaRestTest/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsIT.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.secondary.auth.actions; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class SecondaryAuthActionsIT extends ESRestTestCase { + + private static final String ADMIN_TOKEN = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); + private static final String USER_TOKEN = basicAuthHeaderValue("test_user", new SecureString("x-pack-test-password".toCharArray())); + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + // ensure secondary auth actions go across the cluster, so we don't attempt to double swap out the user in context + .node(0, n -> n.setting("node.roles", "[master]")) + .node(1, n -> n.setting("node.roles", "[data]")) + .setting("xpack.watcher.enabled", "false") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.enabled", "true") + .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.http.ssl.enabled", "false") + .user("test_admin", "x-pack-test-password", "superuser", false) + .user("test_user", "x-pack-test-password", "logsrole", false) + .plugin("secondary-auth-actions-extension") + + .build(); + + @Before + public void setup() throws IOException { + final Request roleRequest = new Request("PUT", "/_security/role/logsrole"); + 
roleRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + roleRequest.setJsonEntity("{\"cluster\":[],\"indices\":[{\"names\":[\"logs*\"],\"privileges\":[\"view_index_metadata\"]}]}"); + client().performRequest(roleRequest); + + final Request logsRequest = new Request("PUT", "/logs/_doc/1"); + logsRequest.setEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)); + logsRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + client().performRequest(logsRequest); + + final Request metricsRequest = new Request("PUT", "/metrics/_doc/1"); + metricsRequest.setEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)); + metricsRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + client().performRequest(metricsRequest); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restAdminSettings() { + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", ADMIN_TOKEN).build(); + } + + public void testSecondaryAuthUser() throws IOException { + final Request authenticateRequest = new Request("GET", "_security/_authenticate"); + authenticateRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN)); + // This should fail because the secondary auth header is not set + ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(authenticateRequest)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); + assertThat(responseException.getMessage(), containsString("es-secondary-authorization header must be used to call action")); + // set the secondary auth header + authenticateRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", USER_TOKEN) + ); + final Response authenticateResponse = client().performRequest(authenticateRequest); + final Map authenticateResponseBody = entityAsMap(authenticateResponse); + // ensure the result represents the secondary user + assertEquals("test_user", authenticateResponseBody.get("username")); + assertEquals(List.of("logsrole"), authenticateResponseBody.get("roles")); + + // check index level permissions + final Request getIndicesRequest = new Request("GET", "*"); + getIndicesRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", USER_TOKEN) + ); + final Response getIndicesResponse = client().performRequest(getIndicesRequest); + final Map getIndicesResponseBody = entityAsMap(getIndicesResponse); + assertNotNull(getIndicesResponseBody.get("logs")); + assertNull(getIndicesResponseBody.get("metrics")); + + // invalid secondary auth header + getIndicesRequest.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", ADMIN_TOKEN).addHeader("es-secondary-authorization", "junk") + ); + responseException = expectThrows(ResponseException.class, () -> client().performRequest(getIndicesRequest)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.UNAUTHORIZED.getStatus())); + assertThat(responseException.getMessage(), containsString("Failed to authenticate secondary user")); + } +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java 
b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java new file mode 100644 index 0000000000000..15ffd5ce480f0 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/module-info.java @@ -0,0 +1,10 @@ +import org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; + +module org.elasticsearch.internal.security { + requires org.elasticsearch.base; + requires org.elasticsearch.server; + requires org.elasticsearch.security; + + provides SecondaryAuthActions with SecondaryAuthActionsPlugin; +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java new file mode 100644 index 0000000000000..5bb5f7b90f407 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/java/org/elasticsearch/secondary/auth/actions/SecondaryAuthActionsPlugin.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.secondary.auth.actions; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; + +import java.util.Set; + +public class SecondaryAuthActionsPlugin extends Plugin implements SecondaryAuthActions { + public Set get() { + return Set.of("cluster:admin/xpack/security/user/authenticate", "indices:admin/get"); + } +} diff --git a/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions new file mode 100644 index 0000000000000..7cc1a88205a76 --- /dev/null +++ b/x-pack/plugin/security/qa/secondary-auth-actions/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions @@ -0,0 +1 @@ +org.elasticsearch.secondary.auth.actions.SecondaryAuthActionsPlugin diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java index cd1eb8a650149..557d601579af8 100644 --- a/x-pack/plugin/security/src/main/java/module-info.java +++ b/x-pack/plugin/security/src/main/java/module-info.java @@ -68,6 +68,7 @@ exports org.elasticsearch.xpack.security.authz to org.elasticsearch.internal.security; exports org.elasticsearch.xpack.security.authc to org.elasticsearch.xcontent; exports org.elasticsearch.xpack.security.slowlog to org.elasticsearch.server; + exports org.elasticsearch.xpack.security.authc.support to org.elasticsearch.internal.security; provides org.elasticsearch.index.SlowLogFieldProvider with org.elasticsearch.xpack.security.slowlog.SecuritySlowLogFieldProvider; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 4fe4b35683343..f4457dcbbfaa9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -290,6 +290,7 @@ import org.elasticsearch.xpack.security.authc.service.FileServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.IndexServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; @@ -587,6 +588,7 @@ public class Security extends Plugin private final SetOnce> reloadableComponents = new SetOnce<>(); private final SetOnce authorizationDenialMessages = new SetOnce<>(); private final SetOnce reservedRoleNameCheckerFactory = new SetOnce<>(); + private final SetOnce secondaryAuthActions = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -1080,7 +1082,8 @@ Collection createComponents( getLicenseState(), threadPool, securityContext.get(), - destructiveOperations + destructiveOperations, + secondaryAuthActions.get() == null ? Set::of : secondaryAuthActions.get() ) ); @@ -2115,6 +2118,7 @@ public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); loadSingletonExtensionAndSetOnce(loader, reservedRoleNameCheckerFactory, ReservedRoleNameChecker.Factory.class); + loadSingletonExtensionAndSetOnce(loader, secondaryAuthActions, SecondaryAuthActions.class); } private void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce setOnce, Class clazz) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index 08544d316e87a..d499e55b21b70 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -28,12 +28,14 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; import org.elasticsearch.xpack.core.security.authz.privilege.HealthAndStatsPrivilege; import org.elasticsearch.xpack.core.security.user.InternalUsers; import org.elasticsearch.xpack.security.action.SecurityActionMapper; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; @@ -51,6 +53,7 @@ public class SecurityActionFilter implements ActionFilter { private final ThreadContext threadContext; private final SecurityContext securityContext; private final DestructiveOperations destructiveOperations; + private final 
SecondaryAuthActions secondaryAuthActions; public SecurityActionFilter( AuthenticationService authcService, @@ -59,7 +62,8 @@ public SecurityActionFilter( XPackLicenseState licenseState, ThreadPool threadPool, SecurityContext securityContext, - DestructiveOperations destructiveOperations + DestructiveOperations destructiveOperations, + SecondaryAuthActions secondaryAuthActions ) { this.authcService = authcService; this.authzService = authzService; @@ -68,6 +72,7 @@ public SecurityActionFilter( this.threadContext = threadPool.getThreadContext(); this.securityContext = securityContext; this.destructiveOperations = destructiveOperations; + this.secondaryAuthActions = secondaryAuthActions; } @Override @@ -109,6 +114,18 @@ operations are blocked on license expiration. All data operations (read and writ TransportVersion.current(), // current version since this is on the same node (original) -> { applyInternal(task, chain, action, request, contextPreservingListener); } ); + } else if (secondaryAuthActions.get().contains(action) && threadContext.getHeader("secondary_auth_action_applied") == null) { + SecondaryAuthentication secondaryAuth = securityContext.getSecondaryAuthentication(); + if (secondaryAuth == null) { + throw new IllegalArgumentException("es-secondary-authorization header must be used to call action [" + action + "]"); + } else { + secondaryAuth.execute(ignore -> { + // this header exists to ensure that if this action goes across nodes we don't attempt to swap out the user again + threadContext.putHeader("secondary_auth_action_applied", "true"); + applyInternal(task, chain, action, request, contextPreservingListener); + return null; + }); + } } else { try (ThreadContext.StoredContext ignore = threadContext.newStoredContextPreservingResponseHeaders()) { applyInternal(task, chain, action, request, contextPreservingListener); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java new file mode 100644 index 0000000000000..0c6f6e7270627 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthActions.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.authc.support; + +import java.util.Set; + +/** + * Actions that are only available when a secondary authenticator is present. The user represented by the secondary authenticator will + * be used as the user for these actions. Secondary authorization requires both the primary and secondary authentication passes. + * Any actions returned here will ensure that the RBAC authorization represents the secondary user. + * If these actions are called without a secondary authenticated user, an exception will be thrown. 
+ * @see SecondaryAuthenticator
+ */
+@FunctionalInterface
+public interface SecondaryAuthActions {
+ Set<String> get();
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java
index a2ab6c1864783..0191062eb4631 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java
@@ -37,6 +37,7 @@
import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef;
import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper;
+import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication;
import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl;
import org.elasticsearch.xpack.core.security.user.InternalUsers;
@@ -47,8 +48,10 @@
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.junit.Before;
+import org.mockito.ArgumentCaptor;
import java.util.Collections;
+import java.util.Set;
import static org.elasticsearch.test.ActionListenerUtils.anyActionListener;
import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.INDICES_PERMISSIONS_KEY;
@@ -62,6 +65,7 @@
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@@ -114,7 +118,8 @@ public void init() throws Exception {
licenseState,
threadPool,
securityContext,
- destructiveOperations
+ destructiveOperations,
+ () -> Set.of("_action_secondary_auth")
);
}
@@ -306,6 +311,79 @@ public void testActionProcessException() throws Exception {
verifyNoMoreInteractions(chain);
}
+ public void testSecondaryAuth() throws Exception {
+ ActionRequest request = mock(ActionRequest.class);
+ ActionListener listener = mock(ActionListener.class);
+ Task task = mock(Task.class);
+ User user1 = new User("user1", "r1", "r2");
+ User user2 = new User("user2", "r3", "r4");
+ Authentication authentication = AuthenticationTestHelper.builder()
+ .user(user1)
+ .realmRef(new RealmRef("test", "test", "foo"))
+ .build(false);
+ Authentication secondaryAuth = AuthenticationTestHelper.builder()
+ .user(user2)
+ .realmRef(new RealmRef("test2", "test2", "foo2"))
+ .build(false);
+ String requestId = UUIDs.randomBase64UUID();
+
+ // mock primary and secondary authentication headers already set
+ assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY));
+ threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
+ threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, authentication.encode());
+ assertNull(threadContext.getTransient(SecondaryAuthentication.THREAD_CTX_KEY));
+ threadContext.putTransient(SecondaryAuthentication.THREAD_CTX_KEY, secondaryAuth);
+ threadContext.putHeader(SecondaryAuthentication.THREAD_CTX_KEY, secondaryAuth.encode());
+
+ String actionName = "_action_secondary_auth";
+ // ensure that the filter swaps out to the secondary user
+ doAnswer(i -> {
+ final Object[] args = i.getArguments();
+ assertThat(args, arrayWithSize(4));
+ ActionListener callback = (ActionListener) args[args.length - 1];
+ assertSame(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY), secondaryAuth);
+ assertEquals(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), secondaryAuth.encode());
+ threadContext.putHeader("_xpack_audit_request_id", requestId);
+ callback.onResponse(secondaryAuth);
+ return Void.TYPE;
+ }).when(authcService).authenticate(eq(actionName), eq(request), eq(InternalUsers.SYSTEM_USER), anyActionListener());
+
+ mockAuthorize();
+ ActionResponse actionResponse = mock(ActionResponse.class);
+ mockChain(task, actionName, request, actionResponse);
+ filter.apply(task, actionName, request, listener, chain);
+ verify(authzService).authorize(eq(secondaryAuth), eq(actionName), eq(request), anyActionListener());
+ verify(auditTrail).coordinatingActionResponse(eq(requestId), eq(secondaryAuth), eq(actionName), eq(request), eq(actionResponse));
+ }
+
+ public void testSecondaryAuthRequired() throws Exception {
+ ActionRequest request = mock(ActionRequest.class);
+ ActionListener listener = mock(ActionListener.class);
+ Task task = mock(Task.class);
+ User user1 = new User("user1", "r1", "r2");
+ Authentication authentication = AuthenticationTestHelper.builder()
+ .user(user1)
+ .realmRef(new RealmRef("test", "test", "foo"))
+ .build(false);
+ // mock primary but not secondary authentication headers already set
+ assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY));
+ threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
+ threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, authentication.encode());
+ String actionName = "_action_secondary_auth";
+ ActionResponse actionResponse = mock(ActionResponse.class);
+ mockChain(task, actionName, request, actionResponse);
+ filter.apply(task, actionName, request, listener, chain);
+ ArgumentCaptor<Exception> exceptionCaptor = ArgumentCaptor.forClass(Exception.class);
+ verify(listener).onFailure(exceptionCaptor.capture());
+ assertTrue(exceptionCaptor.getValue() instanceof IllegalArgumentException);
+ assertEquals(
+ "es-secondary-authorization header must be used to call action [" + actionName + "]",
+ exceptionCaptor.getValue().getMessage()
+ );
+ verifyNoInteractions(authcService);
+ verifyNoInteractions(authzService);
+ }
+
private void mockAuthentication(ActionRequest request, Authentication authentication, String requestId) {
doAnswer(i -> {
final Object[] args = i.getArguments();

From 01efbbf137248011f23d9448c66e2585eb27f34c Mon Sep 17 00:00:00 2001
From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com>
Date: Thu, 4 Apr 2024 16:51:02 +0200
Subject: [PATCH 032/173] Remove AwaitsFix of fixed test (#107104)

This `@AwaitsFix` annotation linked to
https://github.com/elastic/elasticsearch/issues/102813, which was
marked as a duplicate of
https://github.com/elastic/elasticsearch/issues/102337, which in turn
was fixed by https://github.com/elastic/elasticsearch/pull/102724.
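For reference, `@AwaitsFix` is the test-framework annotation that mutes a test until the linked
issue is resolved; the usage removed below followed the usual pattern (a sketch of the pre-patch
state, with the method body elided):

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102813")
    public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { ... }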
--- .../datastreams/action/GetDataStreamsResponseTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index e200ff7cba2e1..2118c98b377bc 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -63,7 +63,6 @@ protected Response mutateInstance(Response instance) { } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102813") public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { // we'll test a data stream with 3 backing indices and a failure store - two backing indices managed by ILM (having the ILM policy // configured for them) and the remainder without any ILM policy configured From 6c986cbd8a6f8b737ba02c27c92bd5eba861de57 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 08:20:08 -0700 Subject: [PATCH 033/173] Add primary and coordination operation rejection ratio metrics (#106978) Introduce two new metrics `es.indexing.primary_operations.rejections.ratio` and `es.indexing.coordinating_operations.rejections.ratio` They are needed as a second signal (along with rejections rate) to determine if it is time to alert about 429s error during _bulk indexing + add IT test to ensure that indexing metrics are published --- .../metrics/NodeIndexingMetricsIT.java | 264 ++++++++++++++++++ .../monitor/metrics/NodeMetrics.java | 44 +++ 2 files changed, 308 insertions(+) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java new file mode 100644 index 0000000000000..6cca0ccb3fdf3 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -0,0 +1,264 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.monitor.metrics; + +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.index.IndexingPressure.MAX_INDEXING_BYTES; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) +public class NodeIndexingMetricsIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(TestTelemetryPlugin.class); + } + + public void testNodeIndexingMetricsArePublishing() throws Exception { + + final String dataNode = internalCluster().startNode(); + ensureStableCluster(1); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + assertAcked(prepareCreate("test").get()); + + // index some documents + final int docsCount = randomIntBetween(500, 1000); + for (int i = 0; i < docsCount; i++) { + var indexResponse = client(dataNode).index(new IndexRequest("test").id("doc_" + i).source(Map.of("key", i, "val", i))) + .actionGet(); + // check that all documents were created successfully since metric counters below assume that + assertThat(indexResponse.status(), equalTo(RestStatus.CREATED)); + } + + // delete documents + final int deletesCount = randomIntBetween(1, 50); + for (int i = 0; i < deletesCount; i++) { + client(dataNode).delete(new DeleteRequest().index("test").id("doc_" + i)).actionGet(); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + assertBusy(() -> { + var indexingTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); + assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); + + var indexingCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); + assertThat(indexingCurrent.getLong(), equalTo(0L)); + + var indexingFailedTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); + assertThat(indexingFailedTotal.getLong(), equalTo(0L)); + + var deletionTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); + assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); + + var deletionCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); + assertThat(deletionCurrent.getLong(), equalTo(0L)); 
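+
+ // all indexing and deletion operations above have completed at this point, hence the "current" gauges read zero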
+ + var indexingTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); + assertThat(indexingTime.getLong(), greaterThan(0L)); + + var deletionTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); + assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); + + var throttleTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.throttle.time"); + assertThat(throttleTime.getLong(), equalTo(0L)); + + var noopTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); + assertThat(noopTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsSize = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.size" + ); + assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); + + var coordinatingOperationsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + + var coordinatingOperationsCurrentSize = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.size" + ); + assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); + + var coordinatingOperationsCurrentTotal = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.total" + ); + assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); + + var coordinatingOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.coordinating_operations.rejections.ratio" + ); + assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + + var primaryOperationsSize = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); + assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); + + var primaryOperationsTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total"); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + + var primaryOperationsCurrentSize = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.size" + ); + assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); + + var primaryOperationsCurrentTotal = getRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.total" + ); + assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); + + var primaryOperationsRejectionsTotal = getRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.rejections.total" + ); + assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); + + var primaryOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.primary_operations.rejections.ratio" + ); + assertThat(primaryOperationsRejectionsRatio.getDouble(), 
equalTo(0.0));
+
+ });
+
+ }
+
+ public void testCoordinatingRejectionMetricsArePublishing() throws Exception {
+
+ // lower Indexing Pressure limits to trigger coordinating rejections
+ final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB"));
+ ensureStableCluster(1);
+
+ final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode)
+ .filterPlugins(TestTelemetryPlugin.class)
+ .findFirst()
+ .orElseThrow();
+ plugin.resetMeter();
+
+ assertAcked(prepareCreate("test").get());
+
+ final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode));
+ final int batchCount = randomIntBetween(100, 1000);
+ for (int i = 0; i < batchCount; i++) {
+ bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(100)));
+ }
+
+ // big batch should not pass thru coordinating limit check
+ expectThrows(EsRejectedExecutionException.class, bulkRequestBuilder);
+
+ // simulate async apm `polling` call for metrics
+ plugin.collect();
+
+ // this bulk request is too big to pass coordinating limit check
+ assertBusy(() -> {
+ var coordinatingOperationsRejectionsTotal = getRecordedMetric(
+ plugin::getLongAsyncCounterMeasurement,
+ "es.indexing.coordinating_operations.rejections.total"
+ );
+ assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L));
+ });
+ }
+
+ public void testPrimaryRejectionMetricsArePublishing() throws Exception {
+
+ // setting low Indexing Pressure limits to trigger primary rejections
+ final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB").build());
+ // setting high Indexing Pressure limits to pass coordinating checks
+ final String coordinatingNode = internalCluster().startCoordinatingOnlyNode(
+ Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "10MB").build()
+ );
+ ensureStableCluster(2);
+
+ final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode)
+ .filterPlugins(TestTelemetryPlugin.class)
+ .findFirst()
+ .orElseThrow();
+ plugin.resetMeter();
+
+ final int numberOfShards = randomIntBetween(1, 5);
+ assertAcked(prepareCreate("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get());
+
+ final BulkRequest bulkRequest = new BulkRequest();
+ final int batchCount = randomIntBetween(50, 100);
+ for (int i = 0; i < batchCount; i++) {
+ bulkRequest.add(new IndexRequest("test").source("field", randomAlphaOfLength(2048)));
+ }
+
+ // big batch should pass thru coordinating limit check but fail on primary
+ // note the bulk request is sent to coordinating client
+ final BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet();
+ assertThat(bulkResponse.hasFailures(), equalTo(true));
+ assertThat(Arrays.stream(bulkResponse.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), equalTo(true));
+
+ // simulate async apm `polling` call for metrics
+ plugin.collect();
+
+ // the bulk request passed the coordinating check; each shard-level request was rejected on the primary
+ assertBusy(() -> {
+ var primaryOperationsRejectionsTotal = getRecordedMetric(
+ plugin::getLongAsyncCounterMeasurement,
+ "es.indexing.primary_operations.rejections.total"
+ );
+ assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards));
+ });
+
+ }
+
+ private static Measurement getRecordedMetric(Function<String, List<Measurement>> metricGetter, String name) {
+ final List<Measurement> measurements = metricGetter.apply(name);
+ 
assertFalse("Indexing metric is not recorded", measurements.isEmpty()); + assertThat(measurements.size(), equalTo(1)); + return measurements.get(0); + } +} diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 8874c43c919ca..527acb8d4fcbc 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -15,9 +15,11 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.stats.IndexingPressureStats; import org.elasticsearch.monitor.jvm.GcNames; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.node.NodeService; +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -526,6 +528,27 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerDoubleGauge( + "es.indexing.coordinating_operations.rejections.ratio", + "Ratio of rejected coordinating operations", + "ratio", + () -> { + var totalCoordinatingOperations = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getTotalCoordinatingOps) + .orElse(0L); + var totalCoordinatingRejections = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getCoordinatingRejections) + .orElse(0L); + // rejections do not count towards `totalCoordinatingOperations` + var totalOps = totalCoordinatingOperations + totalCoordinatingRejections; + return new DoubleWithAttributes(totalOps != 0 ? (double) totalCoordinatingRejections / totalOps : 0.0); + } + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.indexing.primary_operations.size", @@ -596,6 +619,27 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerDoubleGauge( + "es.indexing.primary_operations.rejections.ratio", + "Ratio of rejected primary operations", + "ratio", + () -> { + var totalPrimaryOperations = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getTotalPrimaryOps) + .orElse(0L); + var totalPrimaryRejections = Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getPrimaryRejections) + .orElse(0L); + // rejections do not count towards `totalPrimaryOperations` + var totalOps = totalPrimaryOperations + totalPrimaryRejections; + return new DoubleWithAttributes(totalOps != 0 ? 
(double) totalPrimaryRejections / totalOps : 0.0); + } + ) + ); + metrics.add( registry.registerLongGauge( "es.indexing.memory.limit.size", From 75f548765f3583f41aa3baeffbafdf861aa5a11e Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Thu, 4 Apr 2024 11:34:35 -0400 Subject: [PATCH 034/173] fix substring type resolution (#107109) --- .../function/scalar/string/Substring.java | 10 +++++-- .../AbstractScalarFunctionTestCase.java | 2 +- .../scalar/string/SubstringTests.java | 29 +++++++++++++++++-- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 88187b8ba65bc..3bd7d660352c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -28,8 +29,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; public class Substring extends EsqlScalarFunction implements OptionalArgument { @@ -67,12 +68,15 @@ protected TypeResolution resolveType() { return resolution; } - resolution = isInteger(start, sourceText(), SECOND); + resolution = TypeResolutions.isType(start, dt -> dt == INTEGER, sourceText(), SECOND, "integer"); + if (resolution.unresolved()) { return resolution; } - return length == null ? TypeResolution.TYPE_RESOLVED : isInteger(length, sourceText(), THIRD); + return length == null + ? 
TypeResolution.TYPE_RESOLVED + : TypeResolutions.isType(length, dt -> dt == INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java index 2b75010ef66a1..a0f63a46649e2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java @@ -165,7 +165,7 @@ private String expectedTypeName(Set validTypes) { if (withoutNull.equals(Arrays.asList(strings()))) { return "string"; } - if (withoutNull.equals(Arrays.asList(integers()))) { + if (withoutNull.equals(Arrays.asList(integers())) || withoutNull.equals(List.of(DataTypes.INTEGER))) { return "integer"; } if (withoutNull.equals(Arrays.asList(rationals()))) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 8dbc9eaeeccd6..4736ba2cc74d7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -64,7 +64,32 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(new BytesRef(text.substring(start - 1, start + length - 1))) ); - }))); + }), + new TestCaseSupplier( + "Substring basic test with start long", + List.of(DataTypes.KEYWORD, DataTypes.LONG, DataTypes.INTEGER), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("text"), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(1L, DataTypes.LONG, "start"), + new TestCaseSupplier.TypedData(2, DataTypes.INTEGER, "length") + ), + "second argument of [] must be [integer], found value [start] type [long]" + ) + ), + new TestCaseSupplier( + "Substring basic test with length double", + List.of(DataTypes.KEYWORD, DataTypes.INTEGER, DataTypes.DOUBLE), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("text"), DataTypes.KEYWORD, "str"), + new TestCaseSupplier.TypedData(1L, DataTypes.INTEGER, "start"), + new TestCaseSupplier.TypedData(2.0, DataTypes.DOUBLE, "length") + ), + "third argument of [] must be [integer], found value [length] type [double]" + ) + ) + )); } @Override @@ -90,7 +115,7 @@ public void testNoLengthToString() { @Override protected List argSpec() { - return List.of(required(strings()), required(integers()), optional(integers())); + return List.of(required(strings()), required(DataTypes.INTEGER), optional(DataTypes.INTEGER)); } @Override From 0cc19f32704cfd168d3d3fa3cb2200c27e3b4c6b Mon Sep 17 00:00:00 2001 From: James Baiera Date: Thu, 4 Apr 2024 12:18:29 -0400 Subject: [PATCH 035/173] Extract failure store specific settings to the failure store (#107063) Moves the failure store specific settings logic to the same place that we manage their mappings. 
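To make the move concrete, a rough sketch of how the relocated helpers behave (the "5s" value is
only an example; the setting name and methods are the ones introduced in the diff below):

    import org.elasticsearch.cluster.metadata.DataStreamFailureStoreDefinition;
    import org.elasticsearch.common.settings.Settings;

    // node-level configuration enabling a custom refresh interval for failure store indices
    Settings nodeSettings = Settings.builder()
        .put("data_streams.failure_store.refresh_interval", "5s")
        .build();

    // folds the node setting into the failure store index settings; when the setting is
    // absent, the existing settings instance is returned unchanged
    Settings indexSettings = DataStreamFailureStoreDefinition.buildFailureStoreIndexSettings(Settings.EMPTY, nodeSettings);
    // indexSettings now carries index.refresh_interval=5s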
--- .../DataStreamFailureStoreDefinition.java | 42 +++++++++++++++++++ .../MetadataCreateDataStreamService.java | 18 ++------ 2 files changed, 46 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java index f1fc107df5f62..43c4eae41c948 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -9,6 +9,9 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; @@ -20,6 +23,7 @@ */ public class DataStreamFailureStoreDefinition { + public static final String FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME = "data_streams.failure_store.refresh_interval"; public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; static { @@ -131,4 +135,42 @@ public class DataStreamFailureStoreDefinition { throw new AssertionError(e); } } + + public static TimeValue getFailureStoreRefreshInterval(Settings settings) { + return settings.getAsTime(FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME, null); + } + + /** + * Like {@link DataStreamFailureStoreDefinition#applyFailureStoreSettings} but optionally applied on an existing {@link Settings} + * @param existingSettings initial settings to update + * @param nodeSettings settings from the cluster service which capture the node's current settings + * @return either the existing settings if no changes are needed, or a new settings instance which includes failure store specific + * settings + */ + public static Settings buildFailureStoreIndexSettings(Settings existingSettings, Settings nodeSettings) { + // Optionally set a custom refresh interval for the failure store index. + TimeValue refreshInterval = getFailureStoreRefreshInterval(nodeSettings); + if (refreshInterval != null) { + return Settings.builder() + .put(existingSettings) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) + .build(); + } + return existingSettings; + } + + /** + * Like {@link DataStreamFailureStoreDefinition#buildFailureStoreIndexSettings} but for usage with a {@link Settings.Builder} + * @param nodeSettings settings from the cluster service which capture the node's current settings + * @param builder to capture failure store specific index settings + * @return the original settings builder, with any failure store specific settings applied + */ + public static Settings.Builder applyFailureStoreSettings(Settings nodeSettings, Settings.Builder builder) { + // Optionally set a custom refresh interval for the failure store index. 
+ TimeValue refreshInterval = getFailureStoreRefreshInterval(nodeSettings); + if (refreshInterval != null) { + builder.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval); + } + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 6c933ba1480df..6d0b424cad8f2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -31,7 +31,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -422,15 +421,10 @@ public static ClusterState createFailureStoreIndex( return currentState; } - var indexSettings = MetadataRolloverService.HIDDEN_INDEX_SETTINGS; - // Optionally set a custom refresh interval for the failure store index. - var refreshInterval = getFailureStoreRefreshInterval(settings); - if (refreshInterval != null) { - indexSettings = Settings.builder() - .put(indexSettings) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) - .build(); - } + var indexSettings = DataStreamFailureStoreDefinition.buildFailureStoreIndexSettings( + MetadataRolloverService.HIDDEN_INDEX_SETTINGS, + settings + ); CreateIndexClusterStateUpdateRequest createIndexRequest = new CreateIndexClusterStateUpdateRequest( cause, @@ -489,8 +483,4 @@ public static void validateTimestampFieldMapping(MappingLookup mappingLookup) th // Sanity check (this validation logic should already have been executed when merging mappings): fieldMapper.validate(mappingLookup); } - - public static TimeValue getFailureStoreRefreshInterval(Settings settings) { - return settings.getAsTime(FAILURE_STORE_REFRESH_INTERVAL_SETTING_NAME, null); - } } From 7483844edc23675e22cdd37ddd0beaa715c5d962 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 20:12:41 +0300 Subject: [PATCH 036/173] ESQL: Allow grouping key inside stats expressions (#106579) Similar to aggs, allow grouping keys to used inside STATS expressions by introducing a synthetic eval, e.g.: STATS a = x + count(*) BY x becomes STATS c = count(*) BY x | EVAL a = x + c | KEEP a, x To better handle overriding aliases, introduce EsqlAggregate which keeps the declared structure intact during analysis and verification while merging the output. The deduplication happens now in the optimization phase. 
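Putting the two rewrites together with an aliased grouping key, a query such as

    STATS a = x + count(*) BY x = b + 1

would be rewritten along the lines of (illustrative sketch, not verbatim optimizer output):

    STATS c = count(*) BY x = b + 1 | EVAL a = x + c | KEEP a, x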
Fix small bug that caused replacement of expressions inside aggregations to be skipped despite being applied Improved Verifier to not repeat error messages in case for Aggregates Removed verification heuristics for missing columns as functions as it was too broad --- docs/changelog/106579.yaml | 5 + .../src/main/resources/stats.csv-spec | 141 +++++++++++++++- .../xpack/esql/analysis/Analyzer.java | 142 +++++++++------- .../xpack/esql/analysis/Verifier.java | 92 +++++++---- .../esql/optimizer/LogicalPlanOptimizer.java | 94 ++++++++--- .../xpack/esql/parser/LogicalPlanBuilder.java | 31 +++- .../esql/plan/logical/EsqlAggregate.java | 60 +++++++ .../xpack/esql/analysis/AnalyzerTests.java | 63 ++++++- .../xpack/esql/analysis/VerifierTests.java | 82 ++++++--- .../optimizer/LogicalPlanOptimizerTests.java | 155 ++++++++++++++++++ .../esql/parser/StatementParserTests.java | 16 +- 11 files changed, 728 insertions(+), 153 deletions(-) create mode 100644 docs/changelog/106579.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java diff --git a/docs/changelog/106579.yaml b/docs/changelog/106579.yaml new file mode 100644 index 0000000000000..104ed3066a6f6 --- /dev/null +++ b/docs/changelog/106579.yaml @@ -0,0 +1,5 @@ +pr: 106579 +summary: "ESQL: Allow grouping key inside stats expressions" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 0a18568cf3c84..70d5053c64c45 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1044,7 +1044,7 @@ FROM employees ; // tag::docsStatsByExpression-result[] -my_count:long |LEFT(last_name, 1):keyword +my_count:long |LEFT(last_name, 1):keyword 2 |A 11 |B 5 |C @@ -1188,6 +1188,145 @@ e:i | l:i 4 | 3 ; +nestedAggsOverGroupingExpressionWithoutAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no) + 1 by languages + emp_no +| SORT e +| LIMIT 3 +; + +e:i | languages + emp_no:i +10004 | 10003 +10007 | 10006 +10008 | 10007 +; + +nestedAggsOverGroupingExpressionMultiGroupWithoutAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no + 10) + 1 by languages + emp_no, f = emp_no % 3 +| SORT e, f +| LIMIT 3 +; + +e:i | languages + emp_no:i | f:i +10014 | 10003 | 2 +10017 | 10006 | 0 +10018 | 10007 | 0 +; + +nestedAggsOverGroupingExpressionWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(languages + emp_no + 10) + 1 by languages + emp_no +| SORT e +| LIMIT 3 +; + +e:i | languages + emp_no:i +10014 | 10003 +10017 | 10006 +10018 | 10007 +; + +nestedAggsOverGroupingExpressionWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = max(a), f = min(a), g = count(a) + 1 by a = languages + emp_no +| SORT a +| LIMIT 3 +; + +e: i | f:i | g:l | a:i +10003 | 10003 | 2 | 10003 +10006 | 10006 | 2 | 10006 +10007 | 10007 | 3 | 10007 +; + +nestedAggsOverGroupingTwiceWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS vals = COUNT() BY x = emp_no, x = languages +| SORT x +| LIMIT 3 +; + +vals: l| x:i +15 | 1 +19 | 2 +17 | 3 +; + +nestedAggsOverGroupingWithAlias#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + 1, count(*) by f = first_name +| SORT f +| LIMIT 3 +; + +e:i | count(*):l | 
f:s +10 | 1 | Alejandro +8 | 1 | Amabile +7 | 1 | Anneke +; + +nestedAggsOverGroupingWithAliasInsideExpression#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS m = max(l), o = min(s) by l = languages, s = salary + 1 +| SORT l, s +| LIMIT 5 +; + +m:i | o:i | l:i | s:i +1 | 25977 | 1 | 25977 +1 | 28036 | 1 | 28036 +1 | 34342 | 1 | 34342 +1 | 39111 | 1 | 39111 +1 | 39729 | 1 | 39729 +; + +nestedAggsOverGroupingWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + 1, c = count(*) by f = first_name +| KEEP e +| SORT e +| LIMIT 5 +; + +e:i +4 +4 +4 +4 +5 +; + +nestedAggsOverGroupingAndAggWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = length(f) + count(*), m = max(emp_no) by f = first_name +| KEEP e +| SORT e +| LIMIT 5 +; + +e:l +4 +4 +4 +4 +5 +; + +nestedAggsOverGroupingExpAndAggWithAliasAndProjection#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS e = f + count(*), m = max(emp_no) by f = length(first_name) % 2 +| KEEP e +| SORT e +| LIMIT 3 +; + +e:l +44 +47 +null +; + defaultNameWithSpace ROW a = 1 | STATS couNt(*) | SORT `couNt(*)` ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 5c787415a8419..005dd8081a9e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -8,14 +8,15 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.expression.NamedExpressions; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Keep; @@ -27,9 +28,11 @@ import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.BaseAnalyzerRule; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.ParameterizedAnalyzerRule; +import org.elasticsearch.xpack.ql.capabilities.Resolvables; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -40,6 +43,8 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; +import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition; +import 
org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -53,6 +58,7 @@ import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.ql.rule.Rule; import org.elasticsearch.xpack.ql.rule.RuleExecutor; +import org.elasticsearch.xpack.ql.session.Configuration; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -60,6 +66,7 @@ import org.elasticsearch.xpack.ql.type.InvalidMappedField; import org.elasticsearch.xpack.ql.type.UnsupportedEsField; import org.elasticsearch.xpack.ql.util.CollectionUtils; +import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.ArrayList; @@ -70,7 +77,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -85,7 +91,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; -import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.FLOAT; @@ -105,14 +110,7 @@ public class Analyzer extends ParameterizedRuleExecutor> rules; static { - var resolution = new Batch<>( - "Resolution", - new ResolveTable(), - new ResolveEnrich(), - new ResolveRefs(), - new ResolveFunctions(), - new RemoveDuplicateProjections() - ); + var resolution = new Batch<>("Resolution", new ResolveTable(), new ResolveEnrich(), new ResolveFunctions(), new ResolveRefs()); var finish = new Batch<>("Finish Analysis", Limiter.ONCE, new AddImplicitLimit(), new PromoteStringsInDateComparisons()); rules = List.of(resolution, finish); } @@ -313,6 +311,10 @@ protected LogicalPlan doRule(LogicalPlan plan) { childrenOutput.addAll(output); } + if (plan instanceof Aggregate agg) { + return resolveAggregate(agg, childrenOutput); + } + if (plan instanceof Drop d) { return resolveDrop(d, childrenOutput); } @@ -337,7 +339,60 @@ protected LogicalPlan doRule(LogicalPlan plan) { return resolveMvExpand(p, childrenOutput); } - return plan.transformExpressionsUp(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + return plan.transformExpressionsOnly(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + } + + private LogicalPlan resolveAggregate(Aggregate a, List childrenOutput) { + // if the grouping is resolved but the aggs are not, use the former to resolve the latter + // e.g. STATS a ... 
GROUP BY a = x + 1 + Holder changed = new Holder<>(false); + List groupings = a.groupings(); + // first resolve groupings since the aggs might refer to them + // trying to globally resolve unresolved attributes will lead to some being marked as unresolvable + if (Resolvables.resolved(groupings) == false) { + List newGroupings = new ArrayList<>(groupings.size()); + for (Expression g : groupings) { + Expression resolved = g.transformUp(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); + if (resolved != g) { + changed.set(true); + } + newGroupings.add(resolved); + } + groupings = newGroupings; + if (changed.get()) { + a = new EsqlAggregate(a.source(), a.child(), newGroupings, a.aggregates()); + changed.set(false); + } + } + + if (a.expressionsResolved() == false && Resolvables.resolved(groupings)) { + AttributeMap resolved = new AttributeMap<>(); + for (Expression e : groupings) { + Attribute attr = Expressions.attribute(e); + if (attr != null) { + resolved.put(attr, attr); + } + } + List resolvedList = NamedExpressions.mergeOutputAttributes(new ArrayList<>(resolved.keySet()), childrenOutput); + List newAggregates = new ArrayList<>(); + + for (NamedExpression aggregate : a.aggregates()) { + var agg = (NamedExpression) aggregate.transformUp(UnresolvedAttribute.class, ua -> { + Expression ne = ua; + Attribute maybeResolved = maybeResolveAttribute(ua, resolvedList); + if (maybeResolved != null) { + changed.set(true); + ne = maybeResolved; + } + return ne; + }); + newAggregates.add(agg); + } + + a = changed.get() ? new EsqlAggregate(a.source(), a.child(), groupings, newAggregates) : a; + } + + return a; } private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) { @@ -664,59 +719,30 @@ private static class ResolveFunctions extends ParameterizedAnalyzerRule resolveFunction(uf, context.configuration(), context.functionRegistry()) ); } - } - - /** - * Rule that removes duplicate projects - this is done as a separate rule to allow - * full validation of the node before looking at the duplication. - * The duplication needs to be addressed to avoid ambiguity errors from commands further down - * the line. - */ - private static class RemoveDuplicateProjections extends BaseAnalyzerRule { - - @Override - protected boolean skipResolved() { - return false; - } - - @Override - protected LogicalPlan doRule(LogicalPlan plan) { - if (plan.resolved()) { - if (plan instanceof Aggregate agg) { - plan = removeAggDuplicates(agg); - } - } - return plan; - } - private static LogicalPlan removeAggDuplicates(Aggregate agg) { - var groupings = agg.groupings(); - var newGroupings = new LinkedHashSet<>(groupings); - // reuse existing objects - groupings = newGroupings.size() == groupings.size() ? 
groupings : new ArrayList<>(newGroupings); - - var aggregates = agg.aggregates(); - var newAggregates = new ArrayList<>(aggregates); - var nameSet = Sets.newHashSetWithExpectedSize(newAggregates.size()); - // remove duplicates in reverse to preserve the last one appearing - for (int i = newAggregates.size() - 1; i >= 0; i--) { - var aggregate = newAggregates.get(i); - if (nameSet.add(aggregate.name()) == false) { - newAggregates.remove(i); + public static org.elasticsearch.xpack.ql.expression.function.Function resolveFunction( + UnresolvedFunction uf, + Configuration configuration, + FunctionRegistry functionRegistry + ) { + org.elasticsearch.xpack.ql.expression.function.Function f = null; + if (uf.analyzed()) { + f = uf; + } else { + String functionName = functionRegistry.resolveAlias(uf.name()); + if (functionRegistry.functionExists(functionName) == false) { + f = uf.missing(functionName, functionRegistry.listFunctions()); + } else { + FunctionDefinition def = functionRegistry.resolveFunction(functionName); + f = uf.buildResolved(configuration, def); } } - // reuse existing objects - aggregates = newAggregates.size() == aggregates.size() ? aggregates : newAggregates; - // replace aggregate if needed - agg = (groupings == agg.groupings() && newAggregates == agg.aggregates()) - ? agg - : new Aggregate(agg.source(), agg.child(), groupings, aggregates); - return agg; + return f; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 6492743c8548b..de6c3208df2ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -23,10 +23,11 @@ import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; +import org.elasticsearch.xpack.ql.expression.AttributeSet; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; -import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; @@ -45,6 +46,7 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; +import java.util.function.Consumer; import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.analyzer.VerifierChecks.checkFilterConditionType; @@ -87,16 +89,8 @@ else if (p.resolved()) { p.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); return; } - // handle aggregate first to disambiguate between missing fields or incorrect function declaration - if (p instanceof Aggregate aggregate) { - for (NamedExpression agg : aggregate.aggregates()) { - var child = Alias.unwrap(agg); - if (child instanceof UnresolvedAttribute) { - failures.add(fail(child, "invalid stats declaration; [{}] is not an aggregate function", child.sourceText())); - } - } - } - p.forEachExpression(e -> { + + Consumer unresolvedExpressions = e -> { // everything is fine, skip expression if (e.resolved()) { return; @@ 
-118,7 +112,20 @@ else if (p.resolved()) { failures.add(fail(ae, ae.typeResolved().message())); } }); - }); + }; + + // aggregates duplicate grouping inside aggs - to avoid potentially confusing messages, we only check the aggregates + if (p instanceof Aggregate agg) { + // do groupings first + var groupings = agg.groupings(); + groupings.forEach(unresolvedExpressions); + // followed by just the aggregates (to avoid going through the groups again) + var aggs = agg.aggregates(); + int size = aggs.size() - groupings.size(); + aggs.subList(0, size).forEach(unresolvedExpressions); + } else { + p.forEachExpression(unresolvedExpressions); + } }); // in case of failures bail-out as all other checks will be redundant @@ -155,35 +162,47 @@ else if (p.resolved()) { private static void checkAggregate(LogicalPlan p, Set failures, AttributeMap aliases) { if (p instanceof Aggregate agg) { - - List nakedGroups = new ArrayList<>(agg.groupings().size()); + List groupings = agg.groupings(); + AttributeSet groupRefs = new AttributeSet(); // check grouping // The grouping can not be an aggregate function - agg.groupings().forEach(e -> { + groupings.forEach(e -> { e.forEachUp(g -> { if (g instanceof AggregateFunction af) { failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); } }); - nakedGroups.add(Alias.unwrap(e)); + // keep the grouping attributes (common case) + Attribute attr = Expressions.attribute(e); + if (attr != null) { + groupRefs.add(attr); + } }); - // check aggregates - accept only aggregate functions or expressions in which each naked attribute is copied as - // specified in the grouping clause - agg.aggregates().forEach(e -> { + // check aggregates - accept only aggregate functions or expressions over grouping + // don't allow the group by itself to avoid duplicates in the output + // and since the groups are copied, only look at the declared aggregates + List aggs = agg.aggregates(); + aggs.subList(0, aggs.size() - groupings.size()).forEach(e -> { var exp = Alias.unwrap(e); if (exp.foldable()) { failures.add(fail(exp, "expected an aggregate function but found [{}]", exp.sourceText())); } // traverse the tree to find invalid matches - checkInvalidNamedExpressionUsage(exp, nakedGroups, failures, 0); + checkInvalidNamedExpressionUsage(exp, groupings, groupRefs, failures, 0); }); } } // traverse the expression and look either for an agg function or a grouping match // stop either when no children are left, the leaves are literals or a reference attribute is given - private static void checkInvalidNamedExpressionUsage(Expression e, List groups, Set failures, int level) { + private static void checkInvalidNamedExpressionUsage( + Expression e, + List groups, + AttributeSet groupRefs, + Set failures, + int level + ) { // found an aggregate, constant or a group, bail out if (e instanceof AggregateFunction af) { af.field().forEachDown(AggregateFunction.class, f -> { @@ -191,21 +210,38 @@ private static void checkInvalidNamedExpressionUsage(Expression e, List se.semanticEquals(ne))) { + foundInGrouping = true; + failures.add( + fail( + e, + "column [{}] cannot be used as an aggregate once declared in the STATS BY grouping key [{}]", + ne.name(), + g.sourceText() + ) + ); + break; + } + } + if (foundInGrouping == false) { + failures.add(fail(e, "column [{}] must appear in the STATS BY clause or be used in an aggregate function", ne.name())); + } } // other keep on going else { for (Expression child : e.children()) { - checkInvalidNamedExpressionUsage(child, groups, failures, level + 1); 
+ checkInvalidNamedExpressionUsage(child, groups, groupRefs, failures, level + 1); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index fe2a3076380df..d0375e0b50849 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -30,6 +31,7 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import org.elasticsearch.xpack.ql.common.Failures; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -113,10 +115,11 @@ protected static Batch substitutions() { return new Batch<>( "Substitutions", Limiter.ONCE, - // first extract nested aggs top-level - this simplifies the rest of the rules - new ReplaceStatsAggExpressionWithEval(), - // second extract nested aggs inside of them + new RemoveStatsOverride(), + // first extract nested expressions inside aggs new ReplaceStatsNestedExpressionWithEval(), + // then extract nested aggs top-level + new ReplaceStatsAggExpressionWithEval(), // lastly replace surrogate functions new SubstituteSurrogates(), new ReplaceRegexMatch(), @@ -1289,9 +1292,9 @@ protected LogicalPlan rule(Aggregate aggregate) { Attribute attr = expToAttribute.computeIfAbsent(field.canonical(), k -> { Alias newAlias = new Alias(k.source(), syntheticName(k, af, counter[0]++), null, k, null, true); evals.add(newAlias); - aggsChanged.set(true); return newAlias.toAttribute(); }); + aggsChanged.set(true); // replace field with attribute List newChildren = new ArrayList<>(af.children()); newChildren.set(0, attr); @@ -1327,7 +1330,12 @@ static String syntheticName(Expression expression, AggregateFunction af, int cou * stats a = sum(a) + min(b) by x * becomes * stats a1 = sum(a), a2 = min(b) by x | eval a = a1 + a2 | keep a, x - * + * The rule also considers expressions applied over groups: + * stats a = x + 1 by x becomes stats by x | eval a = x + 1 | keep a, x + * And to combine the two: + * stats a = x + count(*) by x + * becomes + * stats a1 = count(*) by x | eval a = x + a1 | keep a1, x * Since the logic is very similar, this rule also handles duplicate aggregate functions to avoid duplicate compute * stats a = min(x), b = min(x), c = count(*), d = count() by g * becomes @@ -1344,7 +1352,7 @@ protected LogicalPlan rule(Aggregate aggregate) { AttributeMap aliases = new AttributeMap<>(); aggregate.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); - // break down each aggregate into AggregateFunction + // break down each aggregate into AggregateFunction and/or grouping key // preserve the projection at the end List aggs = aggregate.aggregates(); @@ -1386,14 +1394,11 @@ protected LogicalPlan rule(Aggregate aggregate) { newProjections.add(as.replaceChild(found.toAttribute())); } } - // nested expression over 
aggregate function - replace them with reference and move the expression into a - // follow-up eval + // nested expression over aggregate function or groups + // replace them with reference and move the expression into a follow-up eval else { - Holder transformed = new Holder<>(false); + changed.set(true); Expression aggExpression = child.transformUp(AggregateFunction.class, af -> { - transformed.set(true); - changed.set(true); - AggregateFunction canonical = (AggregateFunction) af.canonical(); Alias alias = rootAggs.get(canonical); if (alias == null) { @@ -1415,17 +1420,8 @@ protected LogicalPlan rule(Aggregate aggregate) { return alias.toAttribute(); }); - Alias alias = as; - if (transformed.get()) { - // if at least a change occurred, update the alias and add it to the eval - alias = as.replaceChild(aggExpression); - newEvals.add(alias); - } - // aliased grouping - else { - newAggs.add(alias); - } - + Alias alias = as.replaceChild(aggExpression); + newEvals.add(alias); newProjections.add(alias.toAttribute()); } } @@ -1535,6 +1531,58 @@ private LogicalPlan rule(Eval eval) { } } + /** + * Rule that removes Aggregate overrides in grouping, aggregates and across them inside. + * The overrides appear when the same alias is used multiple times in aggregations and/or groupings: + * STATS x = COUNT(*), x = MIN(a) BY x = b + 1, x = c + 10 + * becomes + * STATS BY x = c + 10 + * That is the last declaration for a given alias, overrides all the other declarations, with + * groups having priority vs aggregates. + * Separately, it replaces expressions used as group keys inside the aggregates with references: + * STATS max(a + b + 1) BY a + b + * becomes + * STATS max($x + 1) BY $x = a + b + */ + private static class RemoveStatsOverride extends AnalyzerRules.AnalyzerRule { + + @Override + protected boolean skipResolved() { + return false; + } + + @Override + protected LogicalPlan rule(Aggregate agg) { + return agg.resolved() ? removeAggDuplicates(agg) : agg; + } + + private static Aggregate removeAggDuplicates(Aggregate agg) { + var groupings = agg.groupings(); + var aggregates = agg.aggregates(); + + groupings = removeDuplicateNames(groupings); + aggregates = removeDuplicateNames(aggregates); + + // replace EsqlAggregate with Aggregate + return new Aggregate(agg.source(), agg.child(), groupings, aggregates); + } + + private static List removeDuplicateNames(List list) { + var newList = new ArrayList<>(list); + var nameSet = Sets.newHashSetWithExpectedSize(list.size()); + + // remove duplicates + for (int i = list.size() - 1; i >= 0; i--) { + var element = list.get(i); + var name = Expressions.name(element); + if (nameSet.add(name) == false) { + newList.remove(i); + } + } + return newList.size() == list.size() ? 
list : newList; + } + } + private abstract static class ParameterizedOptimizerRule extends ParameterizedRule< SubPlan, LogicalPlan, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index b942ccbfb8872..8906014adeecd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -13,12 +13,14 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.MetadataOptionContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.QualifiedNamePatternContext; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; @@ -30,6 +32,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; +import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; @@ -42,10 +45,10 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; +import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.parser.ParserUtils; import org.elasticsearch.xpack.ql.plan.TableIdentifier; -import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -55,6 +58,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; @@ -237,16 +241,31 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { - var groupNames = new LinkedHashSet<>(Expressions.names(Expressions.references(groupings))); + var groupNames = new LinkedHashSet<>(Expressions.names(groupings)); + var groupRefNames = new LinkedHashSet<>(Expressions.names(Expressions.references(groupings))); for (NamedExpression aggregate : aggregates) { - if (Alias.unwrap(aggregate) instanceof UnresolvedAttribute ua && groupNames.contains(ua.name())) { - throw new ParsingException(ua.source(), "Cannot specify grouping expression [{}] as an aggregate", ua.name()); + 
Expression e = Alias.unwrap(aggregate);
+                if (e.resolved() == false && e instanceof UnresolvedFunction == false) {
+                    String name = e.sourceText();
+                    if (groupNames.contains(name)) {
+                        fail(e, "grouping key [{}] already specified in the STATS BY clause", name);
+                    } else if (groupRefNames.contains(name)) {
+                        fail(e, "Cannot specify grouping expression [{}] as an aggregate", name);
+                    }
                }
            }
        }
-        aggregates.addAll(groupings);
-        return input -> new Aggregate(source(ctx), input, new ArrayList<>(groupings), aggregates);
+        // since groupings are aliased, add refs to it in the aggregates
+        for (Expression group : groupings) {
+            aggregates.add(Expressions.attribute(group));
+        }
+
+        return input -> new EsqlAggregate(source(ctx), input, new ArrayList<>(groupings), aggregates);
+    }
+
+    private void fail(Expression exp, String message, Object... args) {
+        throw new VerificationException(Collections.singletonList(Failure.fail(exp, message, args)));
    }

    @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java
new file mode 100644
index 0000000000000..847ed3c9972a8
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plan.logical;
+
+import org.elasticsearch.xpack.ql.expression.Attribute;
+import org.elasticsearch.xpack.ql.expression.Expression;
+import org.elasticsearch.xpack.ql.expression.Expressions;
+import org.elasticsearch.xpack.ql.expression.NamedExpression;
+import org.elasticsearch.xpack.ql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.ql.tree.NodeInfo;
+import org.elasticsearch.xpack.ql.tree.Source;
+
+import java.util.List;
+
+import static java.util.Collections.emptyList;
+import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes;
+
+/**
+ * Extension of Aggregate for handling duplicates.
+ * In ESQL it is possible to declare multiple aggregations and groupings with the same name, with the last declaration in grouping
+ * winning.
+ * Since some of these declarations can be invalid, the data needs to be kept around for validation, yet letting the duplicates
+ * through would lead to ambiguity in the output.
+ * Hence this class: it carries the declarations over so the Verifier can pick them up, while still providing a proper output.
+ * To simplify things, this class is later replaced with a vanilla Aggregate.
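+ * For example, in an illustrative query such as
+ *   STATS x = COUNT(*) BY x = languages
+ * both declarations of x are kept around for the Verifier to inspect, yet only the winning grouping key x shows up
+ * in the output.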
+ */ +public class EsqlAggregate extends Aggregate { + + private List lazyOutput; + + public EsqlAggregate(Source source, LogicalPlan child, List groupings, List aggregates) { + super(source, child, groupings, aggregates); + } + + @Override + public List output() { + if (lazyOutput == null) { + lazyOutput = mergeOutputAttributes(Expressions.asAttributes(aggregates()), emptyList()); + } + + return lazyOutput; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, EsqlAggregate::new, child(), groupings(), aggregates()); + } + + @Override + public EsqlAggregate replaceChild(LogicalPlan newChild) { + return new EsqlAggregate(source(), newChild, groupings(), aggregates()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 543e7c93526d2..aedc789620480 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -21,7 +21,9 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; @@ -1177,9 +1179,14 @@ public void testAggsWithDuplicates() throws Exception { var order = as(limit.child(), OrderBy.class); var agg = as(order.child(), Aggregate.class); var aggregates = agg.aggregates(); - assertThat(aggregates, hasSize(2)); - assertThat(Expressions.names(aggregates), contains("x", "b")); + var output = agg.output(); + assertThat(output, hasSize(2)); + assertThat(Expressions.names(output), contains("x", "b")); var alias = as(aggregates.get(0), Alias.class); + var count = as(alias.child(), Count.class); + alias = as(aggregates.get(1), Alias.class); + var min = as(alias.child(), Min.class); + alias = as(aggregates.get(2), Alias.class); var max = as(alias.child(), Max.class); } @@ -1193,9 +1200,53 @@ public void testAggsWithOverridingInputAndGrouping() throws Exception { var limit = as(plan, Limit.class); var order = as(limit.child(), OrderBy.class); var agg = as(order.child(), Aggregate.class); - var aggregates = agg.aggregates(); - assertThat(aggregates, hasSize(1)); - assertThat(Expressions.names(aggregates), contains("b")); + var output = agg.output(); + assertThat(output, hasSize(1)); + assertThat(Expressions.names(output), contains("b")); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_EsqlAggregate[[emp_no{f}#9 + languages{f}#12 AS emp_no + languages],[MIN(emp_no{f}#9 + languages{f}#12) AS min(emp_no + langu + * ages), emp_no + languages{r}#7]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] 
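+ * That is, the un-aliased grouping expression doubles as the aggregate's argument, so both are expected to resolve
+ * to the same expression (asserted below).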
+ */ + public void testAggsOverGroupingKey() throws Exception { + var plan = analyze(""" + from test + | stats min(emp_no + languages) by emp_no + languages + """); + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var output = agg.output(); + assertThat(output, hasSize(2)); + var aggs = agg.aggregates(); + var min = as(Alias.unwrap(aggs.get(0)), Min.class); + assertThat(min.arguments(), hasSize(1)); + var group = Alias.unwrap(agg.groupings().get(0)); + assertEquals(min.arguments().get(0), group); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_EsqlAggregate[[emp_no{f}#9 + languages{f}#12 AS a],[MIN(a{r}#7) AS min(a), a{r}#7]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testAggsOverGroupingKeyWithAlias() throws Exception { + var plan = analyze(""" + from test + | stats min(a) by a = emp_no + languages + """); + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var output = agg.output(); + assertThat(output, hasSize(2)); + var aggs = agg.aggregates(); + var min = as(Alias.unwrap(aggs.get(0)), Min.class); + assertThat(min.arguments(), hasSize(1)); + assertEquals(Expressions.attribute(min.arguments().get(0)), Expressions.attribute(agg.groupings().get(0))); } public void testAggsWithoutAgg() throws Exception { @@ -1708,7 +1759,7 @@ public void testFoldableInGrouping() { |stats x by 1 """)); - assertThat(e.getMessage(), containsString("[x] is not an aggregate function")); + assertThat(e.getMessage(), containsString("Unknown column [x]")); } public void testScalarFunctionsInStats() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 866a4c458c424..d5d82207a770e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -67,14 +67,6 @@ public void testAggsExpressionsInStatsAggs() { "1:44: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error("from test | eval z = 2 | stats x = avg(z), salary by emp_no") ); - assertEquals( - "1:26: scalar functions over groupings [first_name] not allowed yet", - error("from test | stats length(first_name), count(1) by first_name") - ); - assertEquals( - "1:36: scalar functions over groupings [languages] not allowed yet", - error("from test | stats max(languages) + languages by l = languages") - ); assertEquals( "1:23: nested aggregations [max(salary)] not allowed inside other aggregations [max(max(salary))]", error("from test | stats max(max(salary)) by first_name") @@ -91,7 +83,35 @@ public void testAggsExpressionsInStatsAggs() { "1:23: second argument of [count_distinct(languages, languages)] must be a constant, received [languages]", error("from test | stats x = count_distinct(languages, languages) by emp_no") ); + // no agg function + assertEquals("1:19: expected an aggregate function but found [5]", error("from test | stats 5 by emp_no")); + + // don't allow naked group + assertEquals("1:19: grouping key [emp_no] already specified in the STATS BY clause", error("from test | stats emp_no BY emp_no")); + // don't allow naked group - even when it's an expression + assertEquals( + "1:19: grouping key [languages + emp_no] already specified in the STATS BY clause", + error("from test | stats languages + 
emp_no BY languages + emp_no") + ); + // don't allow group alias + assertEquals( + "1:19: grouping key [e] already specified in the STATS BY clause", + error("from test | stats e BY e = languages + emp_no") + ); + var message = error("from test | stats languages + emp_no BY e = languages + emp_no"); + assertThat( + message, + containsString( + "column [emp_no] cannot be used as an aggregate once declared in the STATS BY grouping key [e = languages + emp_no]" + ) + ); + assertThat( + message, + containsString( + " column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [e = languages + emp_no]" + ) + ); } public void testAggsInsideGrouping() { @@ -103,16 +123,37 @@ public void testAggsInsideGrouping() { public void testAggsWithInvalidGrouping() { assertEquals( - "1:35: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + "1:35: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", error("from test| stats max(languages) + languages by l = languages % 3") ); } + public void testGroupingAlias() throws Exception { + assertEquals( + "1:23: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", + error("from test | stats l = languages + 3 by l = languages % 3 | keep l") + ); + } + + public void testGroupingAliasDuplicate() throws Exception { + assertEquals( + "1:22: column [languages] cannot be used as an aggregate " + + "once declared in the STATS BY grouping key [l = languages % 3, l = languages, l = languages % 2]", + error("from test| stats l = languages + 3 by l = languages % 3, l = languages, l = languages % 2 | keep l") + ); + + assertEquals( + "1:22: column [languages] cannot be used as an aggregate " + "once declared in the STATS BY grouping key [l = languages % 3]", + error("from test| stats l = languages + 3, l = languages % 2 by l = languages % 3 | keep l") + ); + + } + public void testAggsIgnoreCanonicalGrouping() { // the grouping column should appear verbatim - ignore canonical representation as they complicate things significantly // for no real benefit (1+languages != languages + 1) assertEquals( - "1:39: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + "1:39: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages + 1]", error("from test| stats max(languages) + 1 + languages by l = languages + 1") ); } @@ -129,20 +170,6 @@ public void testAggsInsideEval() throws Exception { assertEquals("1:29: aggregate function [max(b)] not allowed outside STATS command", error("row a = 1, b = 2 | eval x = max(b)")); } - public void testAggsWithExpressionOverAggs() { - assertEquals( - "1:44: scalar functions over groupings [languages] not allowed yet", - error("from test | stats max(languages + 1) , m = languages + min(salary + 1) by l = languages, s = salary") - ); - } - - public void testAggScalarOverGroupingColumn() { - assertEquals( - "1:26: scalar functions over groupings [first_name] not allowed yet", - error("from test | stats length(first_name), count(1) by first_name") - ); - } - public void testGroupingInAggs() { assertEquals("2:12: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error(""" from test @@ -352,8 +379,11 @@ public void testNestedAggField() { assertEquals("1:27: Unknown column [avg]", error("from test | stats c = avg(avg)")); } - public void 
testUnfinishedAggFunction() { - assertEquals("1:23: invalid stats declaration; [avg] is not an aggregate function", error("from test | stats c = avg")); + public void testNotFoundFieldInNestedFunction() { + assertEquals(""" + 1:30: Unknown column [missing] + line 1:43: Unknown column [not_found] + line 1:23: Unknown column [avg]""", error("from test | stats c = avg by missing + 1, not_found")); } public void testSpatialSort() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 050ee2caefec0..b2f7690108900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -365,6 +366,51 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Expects + * TopN[[Order[x{r}#10,ASC,LAST]],1000[INTEGER]] + * \_Aggregate[[languages{f}#16],[MAX(emp_no{f}#13) AS x, languages{f}#16]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] + */ + public void testRemoveOverridesInAggregate() throws Exception { + var plan = plan(""" + from test + | stats x = count(emp_no), x = min(emp_no), x = max(emp_no) by languages + | sort x + """); + + var topN = as(plan, TopN.class); + var agg = as(topN.child(), Aggregate.class); + var aggregates = agg.aggregates(); + assertThat(aggregates, hasSize(2)); + assertThat(Expressions.names(aggregates), contains("x", "languages")); + var alias = as(aggregates.get(0), Alias.class); + var max = as(alias.child(), Max.class); + assertThat(Expressions.name(max.arguments().get(0)), equalTo("emp_no")); + } + + // expected stats b by b (grouping overrides the rest of the aggs) + + /** + * Expects + * TopN[[Order[b{r}#10,ASC,LAST]],1000[INTEGER]] + * \_Aggregate[[b{r}#10],[languages{f}#16 AS b]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] 
+ */ + public void testAggsWithOverridingInputAndGrouping() throws Exception { + var plan = plan(""" + from test + | stats b = count(emp_no), b = max(emp_no) by b = languages + | sort b + """); + + var topN = as(plan, TopN.class); + var agg = as(topN.child(), Aggregate.class); + var aggregates = agg.aggregates(); + assertThat(aggregates, hasSize(1)); + assertThat(Expressions.names(aggregates), contains("b")); + } + /** * Project[[s{r}#4 AS d, s{r}#4, last_name{f}#21, first_name{f}#18]] * \_Limit[1000[INTEGER]] @@ -3074,6 +3120,115 @@ public void testNestedMultiExpressionsInGroupingAndAggs() { assertThat(Expressions.names(agg.output()), contains("count(salary + 1)", "max(salary + 23)", "languages + 1", "emp_no % 3")); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[g{r}#8],[COUNT($$emp_no_%_2_+_la>$COUNT$0{r}#20) AS c, g{r}#8]] + * \_Eval[[emp_no{f}#10 % 2[INTEGER] AS g, languages{f}#13 + emp_no{f}#10 % 2[INTEGER] AS $$emp_no_%_2_+_la>$COUNT$0]] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] + */ + public void testNestedExpressionsWithGroupingKeyInAggs() { + var plan = optimizedPlan(""" + from test + | stats c = count(languages + emp_no % 2) by g = emp_no % 2 + """); + + var limit = as(plan, Limit.class); + var aggregate = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(aggregate.aggregates()), contains("c", "g")); + assertThat(Expressions.names(aggregate.groupings()), contains("g")); + var eval = as(aggregate.child(), Eval.class); + var fields = eval.fields(); + // emp_no % 2 + var value = Alias.unwrap(fields.get(0)); + var math = as(value, Mod.class); + assertThat(Expressions.name(math.left()), is("emp_no")); + assertThat(math.right().fold(), is(2)); + // languages + emp_no % 2 + var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); + if (add.left() instanceof Mod mod) { + add = add.swapLeftAndRight(); + } + assertThat(Expressions.name(add.left()), is("languages")); + var mod = as(add.right().canonical(), Mod.class); + assertThat(Expressions.name(mod.left()), is("emp_no")); + assertThat(mod.right().fold(), is(2)); + } + + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[emp_no % 2{r}#12, languages + salary{r}#15],[MAX(languages + salary{r}#15) AS m, COUNT($$languages_+_sal>$COUN + * T$0{r}#28) AS c, emp_no % 2{r}#12, languages + salary{r}#15]] + * \_Eval[[emp_no{f}#18 % 2[INTEGER] AS emp_no % 2, languages{f}#21 + salary{f}#23 AS languages + salary, languages{f}#2 + * 1 + salary{f}#23 + emp_no{f}#18 % 2[INTEGER] AS $$languages_+_sal>$COUNT$0]] + * \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] 
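+ * That is, each nested expression (both grouping keys and the count's synthetic argument) is materialized exactly
+ * once in the Eval below the aggregate.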
+ */ + public void testNestedExpressionsWithMultiGrouping() { + var plan = optimizedPlan(""" + from test + | stats m = max(languages + salary), c = count(languages + salary + emp_no % 2) by emp_no % 2, languages + salary + """); + + var limit = as(plan, Limit.class); + var aggregate = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(aggregate.aggregates()), contains("m", "c", "emp_no % 2", "languages + salary")); + assertThat(Expressions.names(aggregate.groupings()), contains("emp_no % 2", "languages + salary")); + var eval = as(aggregate.child(), Eval.class); + var fields = eval.fields(); + // emp_no % 2 + var value = Alias.unwrap(fields.get(0).canonical()); + var math = as(value, Mod.class); + assertThat(Expressions.name(math.left()), is("emp_no")); + assertThat(math.right().fold(), is(2)); + // languages + salary + var add = as(Alias.unwrap(fields.get(1).canonical()), Add.class); + assertThat(Expressions.name(add.left()), anyOf(is("languages"), is("salary"))); + assertThat(Expressions.name(add.right()), anyOf(is("salary"), is("languages"))); + // languages + salary + emp_no % 2 + var add2 = as(Alias.unwrap(fields.get(2).canonical()), Add.class); + if (add2.left() instanceof Mod mod) { + add2 = add2.swapLeftAndRight(); + } + var add3 = as(add2.left().canonical(), Add.class); + var mod = as(add2.right().canonical(), Mod.class); + // languages + salary + assertThat(Expressions.name(add3.left()), anyOf(is("languages"), is("salary"))); + assertThat(Expressions.name(add3.right()), anyOf(is("salary"), is("languages"))); + // emp_no % 2 + assertThat(Expressions.name(mod.left()), is("emp_no")); + assertThat(mod.right().fold(), is(2)); + } + + /** + * Expects + * Project[[e{r}#5, languages + emp_no{r}#8]] + * \_Eval[[$$MAX$max(languages_+>$0{r}#20 + 1[INTEGER] AS e]] + * \_Limit[1000[INTEGER]] + * \_Aggregate[[languages + emp_no{r}#8],[MAX(emp_no{f}#10 + languages{f}#13) AS $$MAX$max(languages_+>$0, languages + emp_no{ + * r}#8]] + * \_Eval[[languages{f}#13 + emp_no{f}#10 AS languages + emp_no]] + * \_EsRelation[test][_meta_field{f}#16, emp_no{f}#10, first_name{f}#11, ..] 
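+ * That is, the grouping expression is computed once below the aggregate, MAX lands in a synthetic attribute, and
+ * the Eval on top adds 1 to produce e.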
+ */ + public void testNestedExpressionsInStatsWithExpression() { + var plan = optimizedPlan(""" + from test + | stats e = max(languages + emp_no) + 1 by languages + emp_no + """); + + var project = as(plan, Project.class); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.names(fields), contains("e")); + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var groupings = agg.groupings(); + assertThat(Expressions.names(groupings), contains("languages + emp_no")); + eval = as(agg.child(), Eval.class); + fields = eval.fields(); + assertThat(Expressions.names(fields), contains("languages + emp_no")); + } + public void testLogicalPlanOptimizerVerifier() { var plan = plan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index f7bb90208af3f..49fb3af5384b4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsqlAggregate; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; @@ -40,7 +42,6 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -242,7 +243,7 @@ public void testEvalImplicitNames() { public void testStatsWithGroups() { assertEquals( - new Aggregate( + new EsqlAggregate( EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("c"), attribute("d.e")), @@ -258,7 +259,7 @@ public void testStatsWithGroups() { public void testStatsWithoutGroups() { assertEquals( - new Aggregate( + new EsqlAggregate( EMPTY, PROCESSING_CMD_INPUT, List.of(), @@ -273,7 +274,7 @@ public void testStatsWithoutGroups() { public void testStatsWithoutAggs() throws Exception { assertEquals( - new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("a")), List.of(attribute("a"))), + new EsqlAggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("a")), List.of(attribute("a"))), processingCommand("stats by a") ); } @@ -299,7 +300,7 @@ public void testAggsWithGroupKeyAsAgg() throws Exception { """ }; for (String query : queries) { - expectError(query, 
"Cannot specify grouping expression [a] as an aggregate"); + expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); } } @@ -1105,6 +1106,11 @@ private void expectError(String query, String errorMessage) { assertThat(e.getMessage(), containsString(errorMessage)); } + private void expectVerificationError(String query, String errorMessage) { + VerificationException e = expectThrows(VerificationException.class, "Expected syntax error for " + query, () -> statement(query)); + assertThat(e.getMessage(), containsString(errorMessage)); + } + private void expectError(String query, List params, String errorMessage) { ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query, params)); assertThat(e.getMessage(), containsString(errorMessage)); From c2bd3e40746ef715144084b2d8f6ba9cf19eb4fd Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 10:59:24 -0700 Subject: [PATCH 037/173] ESQL: Fix flaky test in LogicalPlanOptimizerTests --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index b2f7690108900..95843b954ef91 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3190,8 +3190,8 @@ public void testNestedExpressionsWithMultiGrouping() { if (add2.left() instanceof Mod mod) { add2 = add2.swapLeftAndRight(); } - var add3 = as(add2.left().canonical(), Add.class); - var mod = as(add2.right().canonical(), Mod.class); + var add3 = as(add2.left(), Add.class); + var mod = as(add2.right(), Mod.class); // languages + salary assertThat(Expressions.name(add3.left()), anyOf(is("languages"), is("salary"))); assertThat(Expressions.name(add3.right()), anyOf(is("salary"), is("languages"))); From a5e7525d817a77a0ae0c91d120cc0d3f2dbf2dd4 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 4 Apr 2024 11:22:28 -0700 Subject: [PATCH 038/173] ESQL: Disable flaky test --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 95843b954ef91..63c2a33543073 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3164,6 +3164,7 @@ public void testNestedExpressionsWithGroupingKeyInAggs() { * 1 + salary{f}#23 + emp_no{f}#18 % 2[INTEGER] AS $$languages_+_sal>$COUNT$0]] * \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] 
*/
+    @AwaitsFix(bugUrl = "disabled since canonical representation relies on hashing which is runtime defined")
     public void testNestedExpressionsWithMultiGrouping() {
         var plan = optimizedPlan("""
             from test

From 3486a0815ef368e2475ed6c97cdff2145f5fec4c Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Thu, 4 Apr 2024 21:01:06 +0200
Subject: [PATCH 039/173] Remove unused methods from SearchContext (#107111)

A couple of methods can be removed here; they're only used on subclasses.
That also cleans up some unsupported-operation exception code that is never
hit.

---
 .../search/DefaultSearchContext.java | 5 ----
 .../internal/FilteredSearchContext.java | 25 -------------------
 .../search/internal/SearchContext.java | 10 --------
 .../search/internal/SubSearchContext.java | 19 --------------
 .../search/rank/RankSearchContext.java | 25 -------------------
 .../elasticsearch/test/TestSearchContext.java | 19 --------------
 6 files changed, 103 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
index 0e6800b9c8d48..49ab7bf74ca91 100644
--- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
@@ -481,7 +481,6 @@ public SearchContext aggregations(SearchContextAggregations aggregations) {
         return this;
     }

-    @Override
     public void addSearchExt(SearchExtBuilder searchExtBuilder) {
         // it's ok to use the writeable name here given that we enforce it to be the same as the name of the element that gets
         // parsed by the corresponding parser. There is one single name and one single way to retrieve the parsed object from the context.
@@ -508,7 +507,6 @@ public SuggestionSearchContext suggest() {
         return suggest;
     }

-    @Override
     public void suggest(SuggestionSearchContext suggest) {
         this.suggest = suggest;
     }
@@ -613,7 +611,6 @@ public TimeValue timeout() {
         return timeout;
     }

-    @Override
     public void timeout(TimeValue timeout) {
         this.timeout = timeout;
     }
@@ -688,7 +685,6 @@ public FieldDoc searchAfter() {
         return searchAfter;
     }

-    @Override
     public SearchContext collapse(CollapseContext collapse) {
         this.collapse = collapse;
         return this;
     }
@@ -786,7 +782,6 @@ public List groupStats() {
         return this.groupStats;
     }

-    @Override
     public void groupStats(List groupStats) {
         this.groupStats = groupStats;
     }

diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index c02a959231a61..7e54eeacffd7d 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -139,11 +139,6 @@ public SuggestionSearchContext suggest() {
         return in.suggest();
     }

-    @Override
-    public void suggest(SuggestionSearchContext suggest) {
-        in.suggest(suggest);
-    }
-
     @Override
     public RankShardContext rankShardContext() {
         return in.rankShardContext();
     }
@@ -204,11 +199,6 @@ public TimeValue timeout() {
         return in.timeout();
     }

-    @Override
-    public void timeout(TimeValue timeout) {
-        in.timeout(timeout);
-    }
-
     @Override
     public int terminateAfter() {
         return in.terminateAfter();
     }
@@ -334,11 +324,6 @@ public List groupStats() {
         return in.groupStats();
     }

-    @Override
-    public void groupStats(List groupStats) {
-        in.groupStats(groupStats);
-    }
-
     @Override
     public boolean version() {
         return in.version();
     }
@@ -409,11 +394,6 @@ public long
getRelativeTimeInMillis() { return in.getRelativeTimeInMillis(); } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - in.addSearchExt(searchExtBuilder); - } - @Override public SearchExtBuilder getSearchExt(String name) { return in.getSearchExt(name); @@ -444,11 +424,6 @@ public boolean isCancelled() { return in.isCancelled(); } - @Override - public SearchContext collapse(CollapseContext collapse) { - return in.collapse(collapse); - } - @Override public CollapseContext collapse() { return in.collapse(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 512df4d15dcb0..9580c450fd921 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -122,8 +122,6 @@ public final void close() { public abstract SearchContext aggregations(SearchContextAggregations aggregations); - public abstract void addSearchExt(SearchExtBuilder searchExtBuilder); - public abstract SearchExtBuilder getSearchExt(String name); public abstract SearchHighlightContext highlight(); @@ -139,8 +137,6 @@ public InnerHitsContext innerHits() { public abstract SuggestionSearchContext suggest(); - public abstract void suggest(SuggestionSearchContext suggest); - public abstract RankShardContext rankShardContext(); public abstract void rankShardContext(RankShardContext rankShardContext); @@ -217,8 +213,6 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract TimeValue timeout(); - public abstract void timeout(TimeValue timeout); - public abstract int terminateAfter(); public abstract void terminateAfter(int terminateAfter); @@ -255,8 +249,6 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract FieldDoc searchAfter(); - public abstract SearchContext collapse(CollapseContext collapse); - public abstract CollapseContext collapse(); public abstract SearchContext parsedPostFilter(ParsedQuery postFilter); @@ -310,8 +302,6 @@ public Query rewrittenQuery() { @Nullable public abstract List groupStats(); - public abstract void groupStats(List groupStats); - public abstract boolean version(); public abstract void version(boolean version); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index f31b319882b5a..91cd647be673d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.collapse.CollapseContext; @@ -22,9 +21,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.sort.SortAndFormats; -import org.elasticsearch.search.suggest.SuggestionSearchContext; - -import java.util.List; public class SubSearchContext extends FilteredSearchContext { @@ -104,11 +100,6 @@ public void highlight(SearchHighlightContext highlight) { this.highlight = highlight; } - @Override - public void 
suggest(SuggestionSearchContext suggest) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public boolean hasScriptFields() { return scriptFields != null && scriptFields.fields().isEmpty() == false; @@ -160,11 +151,6 @@ public SubSearchContext fetchFieldsContext(FetchFieldsContext fetchFieldsContext return this; } - @Override - public void timeout(TimeValue timeout) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public void terminateAfter(int terminateAfter) { throw new UnsupportedOperationException("Not supported"); @@ -269,11 +255,6 @@ public void explain(boolean explain) { this.explain = explain; } - @Override - public void groupStats(List groupStats) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public boolean version() { return version; diff --git a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java index 86f7566683d21..d144e45becc12 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankSearchContext.java @@ -257,11 +257,6 @@ public SearchContext aggregations(SearchContextAggregations aggregations) { throw new UnsupportedOperationException(); } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - throw new UnsupportedOperationException(); - } - @Override public SearchExtBuilder getSearchExt(String name) { throw new UnsupportedOperationException(); @@ -287,11 +282,6 @@ public SuggestionSearchContext suggest() { throw new UnsupportedOperationException(); } - @Override - public void suggest(SuggestionSearchContext suggest) { - throw new UnsupportedOperationException(); - } - @Override public RankShardContext rankShardContext() { throw new UnsupportedOperationException(); @@ -357,11 +347,6 @@ public BitsetFilterCache bitsetFilterCache() { throw new UnsupportedOperationException(); } - @Override - public void timeout(TimeValue timeout) { - throw new UnsupportedOperationException(); - } - @Override public void terminateAfter(int terminateAfter) { throw new UnsupportedOperationException(); @@ -397,11 +382,6 @@ public SearchContext searchAfter(FieldDoc searchAfter) { throw new UnsupportedOperationException(); } - @Override - public SearchContext collapse(CollapseContext collapse) { - throw new UnsupportedOperationException(); - } - @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { throw new UnsupportedOperationException(); @@ -457,11 +437,6 @@ public List groupStats() { throw new UnsupportedOperationException(); } - @Override - public void groupStats(List groupStats) { - throw new UnsupportedOperationException(); - } - @Override public boolean version() { throw new UnsupportedOperationException(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 80d1b82fbfcfe..8db968b59ae1f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -171,11 +171,6 @@ public SearchContext aggregations(SearchContextAggregations searchContextAggrega return this; } - @Override - public void addSearchExt(SearchExtBuilder searchExtBuilder) { - searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder); - } - @Override public SearchExtBuilder 
getSearchExt(String name) { return searchExtBuilders.get(name); @@ -194,9 +189,6 @@ public SuggestionSearchContext suggest() { return null; } - @Override - public void suggest(SuggestionSearchContext suggest) {} - @Override public List rescore() { return Collections.emptyList(); @@ -267,9 +259,6 @@ public TimeValue timeout() { return TimeValue.ZERO; } - @Override - public void timeout(TimeValue timeout) {} - @Override public int terminateAfter() { return terminateAfter; @@ -340,11 +329,6 @@ public FieldDoc searchAfter() { return searchAfter; } - @Override - public SearchContext collapse(CollapseContext collapse) { - return null; - } - @Override public CollapseContext collapse() { return null; @@ -431,9 +415,6 @@ public List groupStats() { return null; } - @Override - public void groupStats(List groupStats) {} - @Override public boolean version() { return false; From b3b4214e47729d007f350b08fd6749e627ea6247 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 4 Apr 2024 13:25:11 -0700 Subject: [PATCH 040/173] Skip multi-release source sets in idea project import by default (#107123) There is an existing IntelliJ bug that prevents doing a full project build when source sets for multi-release jars are present. This changes the project import behavior so that these source sets are ignored by default and can be explicitly enabled by adding `org.gradle.mrjar.idea.enabled=true` to your `~/.gradle/gradle.properties` file should you need to actively work on that code. --- .../src/main/groovy/elasticsearch.ide.gradle | 27 ++++++------ .../gradle/internal/MrjarPlugin.java | 42 +++++++++++-------- 2 files changed, 40 insertions(+), 29 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index ccbe9cd2f4a2b..6cb22dad9bc79 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -130,7 +130,8 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { ':server:generateModulesList', ':server:generatePluginsList', ':generateProviderImpls', - ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs'].collect { elasticsearchProject.right()?.task(it) ?: it }) + ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs', + ':x-pack:libs:es-opensaml-security-api:shadowJar'].collect { elasticsearchProject.right()?.task(it) ?: it }) } // this path is produced by the extractLibs task above @@ -239,20 +240,22 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { * but before the XML document, e.g. 
a doctype or comment */ void modifyXml(Object path, Action action, String preface = null) { - Node xml = parseXml(path) - action.execute(xml) + if (project.file(path).exists()) { + Node xml = parseXml(path) + action.execute(xml) - File xmlFile = project.file(path) - xmlFile.withPrintWriter { writer -> - def printer = new XmlNodePrinter(writer) - printer.namespaceAware = true - printer.preserveWhitespace = true - writer.write("\n") + File xmlFile = project.file(path) + xmlFile.withPrintWriter { writer -> + def printer = new XmlNodePrinter(writer) + printer.namespaceAware = true + printer.preserveWhitespace = true + writer.write("\n") - if (preface != null) { - writer.write(preface) + if (preface != null) { + writer.write(preface) + } + printer.print(xml) } - printer.print(xml) } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 6524247c4c8f6..c64bd3cc9c068 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -49,6 +49,7 @@ public class MrjarPlugin implements Plugin { private static final Pattern MRJAR_SOURCESET_PATTERN = Pattern.compile("main(\\d{2})"); + private static final String MRJAR_IDEA_ENABLED = "org.gradle.mrjar.idea.enabled"; private final JavaToolchainService javaToolchains; @@ -61,23 +62,30 @@ public class MrjarPlugin implements Plugin { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - - List mainVersions = findSourceVersions(project); - List mainSourceSets = new ArrayList<>(); - mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); - List testSourceSets = new ArrayList<>(mainSourceSets); - testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); - for (int javaVersion : mainVersions) { - String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; - SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); - configureSourceSetInJar(project, mainSourceSet, javaVersion); - mainSourceSets.add(mainSourceSetName); - testSourceSets.add(mainSourceSetName); - - String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; - SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); - testSourceSets.add(testSourceSetName); - createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + var isIdea = System.getProperty("idea.active", "false").equals("true"); + var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); + + // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this. 
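+        // To opt back in, add org.gradle.mrjar.idea.enabled=true to ~/.gradle/gradle.properties
+        // (read above via the MRJAR_IDEA_ENABLED property).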
+ // Avoids an IntelliJ bug: + // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea + if (isIdea == false || ideaSourceSetsEnabled) { + List mainVersions = findSourceVersions(project); + List mainSourceSets = new ArrayList<>(); + mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List testSourceSets = new ArrayList<>(mainSourceSets); + testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); + for (int javaVersion : mainVersions) { + String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + configureSourceSetInJar(project, mainSourceSet, javaVersion); + mainSourceSets.add(mainSourceSetName); + testSourceSets.add(mainSourceSetName); + + String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + testSourceSets.add(testSourceSetName); + createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + } } configureMrjar(project); From ff8f75c6795c1608313125b3337cbac15058daea Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Apr 2024 03:25:25 +0200 Subject: [PATCH 041/173] Remove unused ShrinkAction (#107127) ShrinkAction isn't used anymore, we can remove it and its trivial test coverage. --- .../admin/indices/shrink/ShrinkAction.java | 23 ------------------- .../xpack/core/ilm/OperationMode.java | 4 +--- .../authz/privilege/IndexPrivilegeTests.java | 2 -- .../ResizeRequestInterceptorTests.java | 9 +++----- 4 files changed, 4 insertions(+), 34 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java deleted file mode 100644 index 129c07b64fd4d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; - -public class ShrinkAction extends ActionType { - - public static final ShrinkAction INSTANCE = new ShrinkAction(); - public static final String NAME = "indices:admin/shrink"; - - private ShrinkAction() { - super(NAME); - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java index 9c07db9841e23..95a1bf8493e42 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationMode.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; - /** * Enum representing the different modes that Index Lifecycle Service can operate in. */ @@ -24,7 +22,7 @@ public boolean isValidChange(OperationMode nextMode) { }, /** - * this represents a state where only sensitive actions (like {@link ShrinkAction}) will be executed + * this represents a state where only sensitive actions (like {@link ShrinkStep}) will be executed * until they finish, at which point the operation mode will move to STOPPED. */ STOPPING { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index b755d3497f649..b05f7065ff63c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -68,7 +67,6 @@ public void testFindPrivilegesThatGrant() { equalTo(List.of("monitor", "cross_cluster_replication", "manage", "all")) ); assertThat(findPrivilegesThatGrant(RefreshAction.NAME), equalTo(List.of("maintenance", "manage", "all"))); - assertThat(findPrivilegesThatGrant(ShrinkAction.NAME), equalTo(List.of("manage", "all"))); } public void testPrivilegesForRollupFieldCapsAction() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java index f52102ded442b..817d5739b4b9f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.shrink.ResizeAction; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import 
org.elasticsearch.action.admin.indices.shrink.ShrinkAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -78,7 +77,6 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { } else { queries = null; } - final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME); IndicesAccessControl accessControl = new IndicesAccessControl( true, Collections.singletonMap( @@ -94,7 +92,7 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), ResizeAction.NAME, null); AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; @@ -126,7 +124,6 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() .user(new User("john", "role")) .realmRef(new RealmRef("realm", "type", "node", null)) .build(); - final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME); IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.emptyMap()); new SecurityContext(Settings.EMPTY, threadContext).putIndicesAccessControl(accessControl); ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); @@ -134,7 +131,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); { PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), ResizeAction.NAME, null); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; listener.onResponse(AuthorizationResult.deny()); @@ -159,7 +156,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() // swap target and source for success { PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), action, null); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), ResizeAction.NAME, null); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; listener.onResponse(AuthorizationResult.granted()); From 952f07dcb70a7866afeeb80680315a34eef85621 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 4 Apr 2024 21:28:24 -0700 Subject: [PATCH 042/173] Mute TopNFunctionTests#testToXContent (#107132) --- .../org/elasticsearch/xpack/profiling/TopNFunctionTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java 
b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
index afbbe24979466..3a91550767094 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java
@@ -21,6 +21,8 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
 public class TopNFunctionTests extends ESTestCase {
+
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107117")
     public void testToXContent() throws IOException {
         String fileID = "6tVKI4mSYDEJ-ABAIpYXcg";
         int frameType = 1;

From ab19b60a730e9803c05eecf1fd0cba55fee1a9ca Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Fri, 5 Apr 2024 11:12:54 +0300
Subject: [PATCH 043/173] Add check for lossy params in source (#107097)

Lossy params may cause issues with reindexing data under the hood. The
check for lossy params is gated behind an extra setting, which is not
currently available.

---
 .../index/mapper/SourceFieldMapper.java | 41 ++++++++++++----
 .../mapper/DynamicFieldsBuilderTests.java | 3 +-
 .../index/mapper/SourceFieldMapperTests.java | 47 +++++++++++++++++++
 .../query/SearchExecutionContextTests.java | 2 +-
 4 files changed, 83 insertions(+), 10 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
index 15770785e11f9..4a6eaa5b26c39 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java
@@ -17,6 +17,7 @@
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.index.IndexMode;
@@ -28,6 +29,7 @@
 import org.elasticsearch.xcontent.XContentType;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -39,6 +41,8 @@ public class SourceFieldMapper extends MetadataFieldMapper {
 
     public static final String CONTENT_TYPE = "_source";
 
+    public static final String LOSSY_PARAMETERS_ALLOWED_SETTING_NAME = "index.lossy.source-mapping-parameters";
+
     /** The source mode */
     private enum Mode {
         DISABLED,
@@ -128,9 +132,12 @@ public static class Builder extends MetadataFieldMapper.Builder {
 
         private final IndexMode indexMode;
 
-        public Builder(IndexMode indexMode) {
+        private final boolean supportsNonDefaultParameterValues;
+
+        public Builder(IndexMode indexMode, final Settings settings) {
             super(Defaults.NAME);
             this.indexMode = indexMode;
+            this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true);
         }
 
         public Builder setSynthetic() {
@@ -145,13 +152,10 @@ protected Parameter<?>[] getParameters() {
 
         private boolean isDefault() {
             Mode m = mode.get();
-            if (m != null && (indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) {
+            if (m != null && (((indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) || m == Mode.DISABLED)) {
                 return false;
             }
-            if (enabled.get().value() == false) {
-                return false;
-            }
-            return includes.getValue().isEmpty() &&
excludes.getValue().isEmpty(); + return enabled.get().value() && includes.getValue().isEmpty() && excludes.getValue().isEmpty(); } @Override @@ -167,6 +171,27 @@ public SourceFieldMapper build() { if (isDefault()) { return indexMode == IndexMode.TIME_SERIES ? TSDB_DEFAULT : DEFAULT; } + if (supportsNonDefaultParameterValues == false) { + List disallowed = new ArrayList<>(); + if (enabled.get().value() == false) { + disallowed.add("enabled"); + } + if (includes.get().isEmpty() == false) { + disallowed.add("includes"); + } + if (excludes.get().isEmpty() == false) { + disallowed.add("excludes"); + } + if (mode.get() == Mode.DISABLED) { + disallowed.add("mode=disabled"); + } + assert disallowed.isEmpty() == false; + throw new MapperParsingException( + disallowed.size() == 1 + ? "Parameter [" + disallowed.get(0) + "] is not allowed in source" + : "Parameters [" + String.join(",", disallowed) + "] are not allowed in source" + ); + } SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( mode.get(), enabled.get(), @@ -186,7 +211,7 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode()) + c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) ); static final class SourceFieldType extends MappedFieldType { @@ -321,7 +346,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode).init(this); + return new Builder(indexMode, Settings.EMPTY).init(this); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 329d8a795732f..229e2e6f72cc1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -67,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 5601290fed5c7..47b8bb3be36b7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -11,6 +11,7 @@ import 
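/*
 * [Editor's sketch — not part of the patch.] How the new gate is meant to behave, assuming
 * the index setting above eventually becomes registerable (per the commit message it is not
 * yet available); the mapping snippet and exception text below mirror the tests that follow,
 * everything else is illustrative:
 *
 *   Settings strict = Settings.builder()
 *       .put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false)  // disallow lossy params
 *       .build();
 *   // With "strict" settings, parsing a mapping such as
 *   //   "_source": { "enabled": false }
 *   // fails in SourceFieldMapper.Builder#build with:
 *   //   MapperParsingException: Parameter [enabled] is not allowed in source
 *   // while the default (setting absent, i.e. true) keeps today's lenient behavior.
 */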
org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -238,4 +239,50 @@ public void testSyntheticSourceInTimeSeries() throws IOException { assertTrue(mapper.sourceMapper().isSynthetic()); assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } + + public void testSupportsNonDefaultParameterValues() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("enabled", false).endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [enabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("includes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [includes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [excludes] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService(settings, topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject())) + .documentMapper() + .sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameter [mode=disabled] is not allowed in source")); + + e = expectThrows( + MapperParsingException.class, + () -> createMapperService( + settings, + topMapping( + b -> b.startObject("_source").field("enabled", false).array("includes", "foo").array("excludes", "foo").endObject() + ) + ).documentMapper().sourceMapper() + ); + assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 2f31bac135716..3085ff89603ce 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); From ddb1b7463fa8cf773ab403895f05b9776992e256 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 5 
Apr 2024 10:27:54 +0200
Subject: [PATCH 044/173] Small adjustments to metadata fields fetching (#107071)

While looking at #106325, which moves fetching of metadata fields out of the
StoredFieldsPhase, I noticed some small adjustments that we can make to
FieldsVisitor, CustomFieldsVisitor and StoredFieldsPhase. These are not
functional changes; the only goal is to make things simpler and clearer,
hopefully.

- add test coverage for situation where _routing is provided with docs, hence
  returned by default
- make a stronger connection between CustomFieldsVisitor and FieldsVisitor
  around fields that are treated differently (_ignored, _routing, _id and _source)
- explicitly exclude _id from StoredFieldsPhase like we already do for _source
  as it's retrieved separately
- move the _source exclusion in StoredFieldsPhase to after calling
  getMatchingFieldNames, so that patterns that match _source still exclude it

---
 .../search/source/MetadataFetchingIT.java | 40 +++++++++++++++++++
 .../fieldvisitor/CustomFieldsVisitor.java | 17 ++++----
 .../index/fieldvisitor/FieldsVisitor.java | 26 +++++-------
 .../fetch/subphase/StoredFieldsPhase.java | 22 +++++-----
 4 files changed, 71 insertions(+), 34 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java
index 7d902cf140839..b8d1d45a6f85d 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.search.source;
 
 import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.common.ValidationException;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.NestedQueryBuilder;
@@ -81,6 +82,11 @@ public void testWithRouting() {
         prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get();
         refresh();
 
+        assertResponse(prepareSearch("test"), response -> {
+            assertThat(response.getHits().getAt(0).getId(), notNullValue());
+            assertThat(response.getHits().getAt(0).field("_routing"), notNullValue());
+            assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
+        });
         assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false), response -> {
             assertThat(response.getHits().getAt(0).getId(), nullValue());
             assertThat(response.getHits().getAt(0).field("_routing"), nullValue());
@@ -90,6 +96,40 @@ public void testWithRouting() {
             assertThat(response.getHits().getAt(0).getId(), nullValue());
             assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
         });
+
+        GetResponse getResponse = client().prepareGet("test", "1").setRouting("toto").get();
+        assertTrue(getResponse.isExists());
+        assertEquals("toto", getResponse.getFields().get("_routing").getValue());
+    }
+
+    public void testWithIgnored() {
+        assertAcked(prepareCreate("test").setMapping("ip", "type=ip,ignore_malformed=true"));
+        ensureGreen();
+
+        prepareIndex("test").setId("1").setSource("ip", "value").get();
+        refresh();
+
+        assertResponse(prepareSearch("test"), response -> {
+            assertThat(response.getHits().getAt(0).getId(), notNullValue());
+            assertThat(response.getHits().getAt(0).field("_ignored").getValue(), equalTo("ip"));
+            assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
+        });
+
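/*
 * [Editor's note — not part of the patch.] The assertion above relies on ignore_malformed:
 * "value" is not a valid IP, so the "ip" field is dropped at index time and its name is
 * recorded in the _ignored metadata field, which FieldsVisitor always loads. A sketch of
 * the equivalent REST interaction (index name and document are illustrative):
 *
 *   PUT  test          {"mappings":{"properties":{"ip":{"type":"ip","ignore_malformed":true}}}}
 *   PUT  test/_doc/1   {"ip":"value"}
 *   POST test/_search  -> hits[0].fields._ignored == ["ip"]
 */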
assertResponse(prepareSearch("test").storedFields("_none_"), response -> { + assertThat(response.getHits().getAt(0).getId(), nullValue()); + assertThat(response.getHits().getAt(0).field("_ignored"), nullValue()); + assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); + }); + + { + GetResponse getResponse = client().prepareGet("test", "1").get(); + assertTrue(getResponse.isExists()); + assertThat(getResponse.getField("_ignored"), nullValue()); + } + { + GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_ignored").get(); + assertTrue(getResponse.isExists()); + assertEquals("ip", getResponse.getField("_ignored").getValue()); + } } public void testInvalid() { diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index d7f6e3541838b..92f74615711f1 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -8,26 +8,27 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; import java.util.HashSet; -import java.util.List; import java.util.Set; /** - * A field visitor that allows to load a selection of the stored fields by exact name - * {@code _id} and {@code _routing} fields are always loaded. + * A field visitor that allows to load a selection of the stored fields by exact name. + * {@code _id}, {@code _routing}, and {@code _ignored} fields are always loaded. + * {@code _source} is always loaded unless disabled explicitly. */ public class CustomFieldsVisitor extends FieldsVisitor { - private final Set fields; public CustomFieldsVisitor(Set fields, boolean loadSource) { super(loadSource); this.fields = new HashSet<>(fields); - // metadata fields are already handled by FieldsVisitor, so removing - // them here means that if the only fields requested are metadata - // fields then we can shortcut loading - List.of("_id", "_routing", "_source").forEach(this.fields::remove); + // metadata fields that are always retrieved are already handled by FieldsVisitor, so removing + // them here means that if the only fields requested are those metadata fields then we can shortcut loading + FieldsVisitor.BASE_REQUIRED_FIELDS.forEach(this.fields::remove); + this.fields.remove(this.sourceFieldName); + this.fields.remove(IgnoredFieldMapper.NAME); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 4789dcc131b89..bf4ad150b1ee4 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -34,10 +34,10 @@ * Base {@link StoredFieldVisitor} that retrieves all non-redundant metadata. 
*/ public class FieldsVisitor extends FieldNamesProvidingStoredFieldsVisitor { - private static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); + static final Set BASE_REQUIRED_FIELDS = Set.of(IdFieldMapper.NAME, RoutingFieldMapper.NAME); private final boolean loadSource; - private final String sourceFieldName; + final String sourceFieldName; private final Set requiredFields; protected BytesReference source; protected String id; @@ -63,6 +63,7 @@ public Status needsField(FieldInfo fieldInfo) { // Always load _ignored to be explicit about ignored fields // This works because _ignored is added as the first metadata mapper, // so its stored fields always appear first in the list. + // Note that _ignored is also multi-valued, which is why it can't be removed from the set like other fields if (IgnoredFieldMapper.NAME.equals(fieldInfo.name)) { return Status.YES; } @@ -72,8 +73,7 @@ public Status needsField(FieldInfo fieldInfo) { return Status.YES; } } - // All these fields are single-valued so we can stop when the set is - // empty + // All these fields are single-valued so we can stop when the set is empty return requiredFields.isEmpty() ? Status.STOP : Status.NO; } @@ -100,7 +100,7 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) { binaryField(fieldInfo, new BytesRef(value)); } - public void binaryField(FieldInfo fieldInfo, BytesRef value) { + private void binaryField(FieldInfo fieldInfo, BytesRef value) { if (sourceFieldName.equals(fieldInfo.name)) { source = new BytesArray(value); } else if (IdFieldMapper.NAME.equals(fieldInfo.name)) { @@ -147,12 +147,6 @@ public void doubleField(FieldInfo fieldInfo, double value) { addValue(fieldInfo.name, value); } - public void objectField(FieldInfo fieldInfo, Object object) { - assert IdFieldMapper.NAME.equals(fieldInfo.name) == false : "_id field must go through binaryField"; - assert sourceFieldName.equals(fieldInfo.name) == false : "source field must go through binaryField"; - addValue(fieldInfo.name, object); - } - public BytesReference source() { return source; } @@ -178,7 +172,9 @@ public Map> fields() { } public void reset() { - if (fieldsValues != null) fieldsValues.clear(); + if (fieldsValues != null) { + fieldsValues.clear(); + } source = null; id = null; @@ -193,11 +189,7 @@ void addValue(String name, Object value) { fieldsValues = new HashMap<>(); } - List values = fieldsValues.get(name); - if (values == null) { - values = new ArrayList<>(2); - fieldsValues.put(name, values); - } + List values = fieldsValues.computeIfAbsent(name, k -> new ArrayList<>(2)); values.add(value); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java index d6950df962433..483285dba1fa7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -73,16 +74,19 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { if (storedFieldsContext.fieldNames() != null) { SearchExecutionContext sec = 
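/*
 * [Editor's sketch — not part of the patch.] Why CustomFieldsVisitor strips the always-loaded
 * fields from its request set: FieldsVisitor unconditionally handles _id, _routing and _ignored
 * (and _source unless disabled), so once those names are removed an empty set means the stored
 * field loader can shortcut. The names mirror the hunks above; the assertion is illustrative:
 *
 *   Set<String> requested = new HashSet<>(Set.of("_id", "_routing", "_source", "_ignored"));
 *   FieldsVisitor.BASE_REQUIRED_FIELDS.forEach(requested::remove); // _id, _routing
 *   requested.remove(sourceFieldName);                             // _source
 *   requested.remove(IgnoredFieldMapper.NAME);                     // _ignored
 *   assert requested.isEmpty();  // only always-loaded fields were asked for -> shortcut loading
 */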
fetchContext.getSearchExecutionContext();
             for (String field : storedFieldsContext.fieldNames()) {
-                if (SourceFieldMapper.NAME.equals(field) == false) {
-                    Collection<String> fieldNames = sec.getMatchingFieldNames(field);
-                    for (String fieldName : fieldNames) {
-                        MappedFieldType ft = sec.getFieldType(fieldName);
-                        if (ft.isStored() == false) {
-                            continue;
-                        }
-                        storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name())));
-                        fieldsToLoad.add(ft.name());
+                Collection<String> fieldNames = sec.getMatchingFieldNames(field);
+                for (String fieldName : fieldNames) {
+                    // _id and _source are always retrieved anyway, no need to do it explicitly. See FieldsVisitor.
+                    // They are not returned as part of HitContext#loadedFields hence they are not added to documents by this sub-phase
+                    if (IdFieldMapper.NAME.equals(field) || SourceFieldMapper.NAME.equals(field)) {
+                        continue;
+                    }
+                    MappedFieldType ft = sec.getFieldType(fieldName);
+                    if (ft.isStored() == false) {
+                        continue;
                     }
+                    storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name())));
+                    fieldsToLoad.add(ft.name());
                 }
             }
         }

From 3f2070896543012a7ffbd6c83788699dd19fbd60 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Fri, 5 Apr 2024 10:51:53 +0200
Subject: [PATCH 045/173] Remove unused NamedWriteableRegistry from search REST actions (#107126)

We don't need `NamedWriteableRegistry` to parse search requests any longer;
it was an unused parameter. Removing it from search request parsing allows
for removing it as a dependency from a number of places.

---
 .../script/mustache/MustachePlugin.java | 2 +-
 .../mustache/RestSearchTemplateAction.java | 6 +---
 .../RestSearchTemplateActionTests.java | 3 +-
 .../AbstractBaseReindexRestHandler.java | 14 +++------
 .../AbstractBulkByQueryRestHandler.java | 11 +------
 .../elasticsearch/reindex/ReindexPlugin.java | 6 ++--
 .../reindex/RestDeleteByQueryAction.java | 11 +++----
 .../reindex/RestReindexAction.java | 9 ++----
 .../reindex/RestUpdateByQueryAction.java | 11 +++----
 .../reindex/RestDeleteByQueryActionTests.java | 3 +-
 .../reindex/RestReindexActionTests.java | 14 +++------
 .../reindex/RestUpdateByQueryActionTests.java | 3 +-
 .../elasticsearch/action/ActionModule.java | 6 ++--
 .../action/search/RestMultiSearchAction.java | 29 ++-----------------
 .../rest/action/search/RestSearchAction.java | 27 +++--------------
 .../search/MultiSearchRequestTests.java | 7 ++---
 .../search/RestMultiSearchActionTests.java | 10 +------
 .../action/search/RestSearchActionTests.java | 3 +-
 .../xpack/search/AsyncSearch.java | 2 +-
 .../search/RestSubmitAsyncSearchAction.java | 19 ++----------
 .../RestSubmitAsyncSearchActionTests.java | 10 +------
 .../org/elasticsearch/xpack/fleet/Fleet.java | 2 +-
 .../rest/RestFleetMultiSearchAction.java | 1 -
 .../fleet/rest/RestFleetSearchAction.java | 19 ++----------
 .../elasticsearch/xpack/rollup/Rollup.java | 2 +-
 .../rollup/rest/RestRollupSearchAction.java | 6 +---
 26 files changed, 50 insertions(+), 186 deletions(-)

diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
index c698a603055ad..8c1a410ee8a66 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
@@ -67,7 +67,7 @@ public List<RestHandler> getRestHandlers(
         Predicate<NodeFeature> clusterSupportsFeature
     ) {
         return Arrays.asList(
-            new
RestSearchTemplateAction(namedWriteableRegistry, clusterSupportsFeature), + new RestSearchTemplateAction(clusterSupportsFeature), new RestMultiSearchTemplateAction(settings), new RestRenderSearchTemplateAction() ); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index a29c10b7501f1..ab1f90bc59933 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -36,11 +35,9 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchTemplateAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { - this.namedWriteableRegistry = namedWriteableRegistry; + public RestSearchTemplateAction(Predicate clusterSupportsFeature) { this.clusterSupportsFeature = clusterSupportsFeature; } @@ -73,7 +70,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchRequest, request, null, - namedWriteableRegistry, clusterSupportsFeature, size -> searchRequest.source().size(size) ); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 1efa0ada221ef..1f1955e5ca171 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -28,7 +27,7 @@ public final class RestSearchTemplateActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestSearchTemplateAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestSearchTemplateAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchTemplateResponse.class)); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java index 48c50450656f3..6643e2e9d20ea 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBaseReindexRestHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; @@ -39,15 +38,10 @@ protected AbstractBaseReindexRestHandler(A action) { this.action = action; } - protected RestChannelConsumer doPrepareRequest( - RestRequest request, - NamedWriteableRegistry namedWriteableRegistry, - NodeClient client, - boolean includeCreated, - boolean includeUpdated - ) throws IOException { + protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated) + throws IOException { // Build the internal request - Request internal = setCommonOptions(request, buildRequest(request, namedWriteableRegistry)); + Request internal = setCommonOptions(request, buildRequest(request)); // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { @@ -78,7 +72,7 @@ protected RestChannelConsumer doPrepareRequest( /** * Build the Request based on the RestRequest. */ - protected abstract Request buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException; + protected abstract Request buildRequest(RestRequest request) throws IOException; /** * Sets common options of {@link AbstractBulkByScrollRequest} requests. diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java index cb0d09f1f2450..8cf7d2200ad36 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractBulkByQueryRestHandler.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; @@ -45,7 +44,6 @@ protected AbstractBulkByQueryRestHandler(A action) { protected void parseInternalRequest( Request internal, RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, Map> bodyConsumers ) throws IOException { @@ -58,14 +56,7 @@ protected void parseInternalRequest( IntConsumer sizeConsumer = restRequest.getRestApiVersion() == RestApiVersion.V_7 ? 
size -> setMaxDocsFromSearchSize(internal, size) : size -> failOnSizeSpecified(); - RestSearchAction.parseSearchRequest( - searchRequest, - restRequest, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - sizeConsumer - ); + RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, clusterSupportsFeature, sizeConsumer); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java index 8cdfc77db6f7f..1a40f77250e5f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java @@ -76,9 +76,9 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestReindexAction(namedWriteableRegistry, clusterSupportsFeature), - new RestUpdateByQueryAction(namedWriteableRegistry, clusterSupportsFeature), - new RestDeleteByQueryAction(namedWriteableRegistry, clusterSupportsFeature), + new RestReindexAction(clusterSupportsFeature), + new RestUpdateByQueryAction(clusterSupportsFeature), + new RestDeleteByQueryAction(clusterSupportsFeature), new RestRethrottleAction(nodesInCluster) ); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java index cc98dc06575b8..ff0ef1282b2d9 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestDeleteByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.DeleteByQueryAction; @@ -31,12 +30,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestDeleteByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestDeleteByQueryAction(Predicate clusterSupportsFeature) { super(DeleteByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, false); + return doPrepareRequest(request, client, false, false); } @Override - protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException { /* * Passing the search request through DeleteByQueryRequest first allows * it to set its own defaults which differ from SearchRequest's @@ -74,7 +71,7 @@ protected DeleteByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("conflicts", o -> internal.setConflicts((String) o)); consumers.put("max_docs", s -> 
setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); return internal; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java index 253fd581cfceb..a693b0babaa9f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestReindexAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; @@ -34,12 +33,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestReindexAction extends AbstractBaseReindexRestHandler implements RestRequestFilter { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestReindexAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestReindexAction(Predicate clusterSupportsFeature) { super(ReindexAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -55,11 +52,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, true, true); + return doPrepareRequest(request, client, true, true); } @Override - protected ReindexRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected ReindexRequest buildRequest(RestRequest request) throws IOException { if (request.hasParam("pipeline")) { throw new IllegalArgumentException( "_reindex doesn't support [pipeline] as a query parameter. Specify it in the [dest] object instead." 
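[Editor's sketch — not part of the patch.] For reference, the handler contract after this
change: buildRequest no longer receives a NamedWriteableRegistry, so a typical subclass now
looks like the following (the request construction details are illustrative; the method shapes
are the ones introduced in the hunks above):

    @Override
    protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException {
        DeleteByQueryRequest internal = new DeleteByQueryRequest();
        Map<String, Consumer<Object>> consumers = new HashMap<>();
        consumers.put("conflicts", o -> internal.setConflicts((String) o));
        // registry-free parsing; feature checks go through the predicate instead
        parseInternalRequest(internal, request, clusterSupportsFeature, consumers);
        return internal;
    }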
diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java index 50536a164727a..2a6146b9fad1c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RestUpdateByQueryAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.reindex.UpdateByQueryAction; @@ -32,12 +31,10 @@ @ServerlessScope(Scope.PUBLIC) public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestUpdateByQueryAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { + public RestUpdateByQueryAction(Predicate clusterSupportsFeature) { super(UpdateByQueryAction.INSTANCE); - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -58,11 +55,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return doPrepareRequest(request, namedWriteableRegistry, client, false, true); + return doPrepareRequest(request, client, false, true); } @Override - protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableRegistry namedWriteableRegistry) throws IOException { + protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException { if (request.getRestApiVersion() == RestApiVersion.V_7 && request.hasParam("type")) { request.param("type"); } @@ -78,7 +75,7 @@ protected UpdateByQueryRequest buildRequest(RestRequest request, NamedWriteableR consumers.put("script", o -> internal.setScript(Script.parse(o))); consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue())); - parseInternalRequest(internal, request, namedWriteableRegistry, clusterSupportsFeature, consumers); + parseInternalRequest(internal, request, clusterSupportsFeature, consumers); internal.setPipeline(request.param("pipeline")); return internal; diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java index aa457fae9e377..013eace19f1b5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestDeleteByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestDeleteByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestDeleteByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestDeleteByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> 
mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java index ddb8c2ce0225d..2e1810482bb5f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestReindexActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -21,10 +20,8 @@ import org.junit.Before; import java.io.IOException; -import java.util.Collections; import static java.util.Collections.singletonMap; -import static org.mockito.Mockito.mock; public class RestReindexActionTests extends RestActionTestCase { @@ -32,7 +29,7 @@ public class RestReindexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestReindexAction(mock(NamedWriteableRegistry.class), nf -> false); + action = new RestReindexAction(nf -> false); controller().registerHandler(action); } @@ -56,10 +53,7 @@ public void testPipelineQueryParameterIsError() throws IOException { request.withContent(BytesReference.bytes(body), body.contentType()); } request.withParams(singletonMap("pipeline", "doesn't matter")); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> action.buildRequest(request.build(), new NamedWriteableRegistry(Collections.emptyList())) - ); + Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build())); assertEquals("_reindex doesn't support [pipeline] as a query parameter. 
Specify it in the [dest] object instead.", e.getMessage()); } @@ -68,14 +62,14 @@ public void testSetScrollTimeout() throws IOException { { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT, request.getScrollTime()); } { FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(singletonMap("scroll", "10m")); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); - ReindexRequest request = action.buildRequest(requestBuilder.build(), new NamedWriteableRegistry(Collections.emptyList())); + ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals("10m", request.getScrollTime().toString()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java index a3f468df89e1e..b83f11a91d1b8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RestUpdateByQueryActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.rest.RestRequest; @@ -31,7 +30,7 @@ public final class RestUpdateByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - controller().registerHandler(new RestUpdateByQueryAction(mock(NamedWriteableRegistry.class), nf -> false)); + controller().registerHandler(new RestUpdateByQueryAction(nf -> false)); verifyingClient.setExecuteVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(BulkByScrollResponse.class)); } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7e03b495438d8..cd01184801c64 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -933,14 +933,12 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestBulkAction(settings)); registerHandler.accept(new RestUpdateAction()); - registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature)); + registerHandler.accept(new RestSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature)); registerHandler.accept(new RestSearchScrollAction()); registerHandler.accept(new RestClearScrollAction()); registerHandler.accept(new RestOpenPointInTimeAction()); registerHandler.accept(new RestClosePointInTimeAction()); - registerHandler.accept( - new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature) - ); + registerHandler.accept(new RestMultiSearchAction(settings, restController.getSearchUsageHolder(), clusterSupportsFeature)); 
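/*
 * [Editor's note — not part of the patch.] The multi-search parser loses the registry
 * argument as well; the four-argument call shape used by the registration above is
 * (variable names are illustrative):
 *
 *   MultiSearchRequest msearch = RestMultiSearchAction.parseRequest(
 *       restRequest, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature);
 */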
registerHandler.accept(new RestKnnSearchAction()); registerHandler.accept(new RestValidateQueryAction()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 69cc4f23f3956..0a7a4a9701a90 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; @@ -51,18 +50,11 @@ public class RestMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestMultiSearchAction( - Settings settings, - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestMultiSearchAction(Settings settings, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -85,13 +77,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final MultiSearchRequest multiSearchRequest = parseRequest( - request, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature - ); + final MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature); return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); cancellableClient.execute( @@ -107,19 +93,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature ) throws IOException { - return parseRequest( - restRequest, - namedWriteableRegistry, - allowExplicitIndex, - searchUsageHolder, - clusterSupportsFeature, - (k, v, r) -> false - ); + return parseRequest(restRequest, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, (k, v, r) -> false); } /** @@ -128,7 +106,6 @@ public static MultiSearchRequest parseRequest( */ public static MultiSearchRequest parseRequest( RestRequest restRequest, - NamedWriteableRegistry namedWriteableRegistry, boolean allowExplicitIndex, SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature, diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index cfb70da9fb454..3dbb98f7a7685 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java 
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; @@ -71,16 +70,10 @@ public class RestSearchAction extends BaseRestHandler { public static final Set RESPONSE_PARAMS = Set.of(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -124,15 +117,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); return channel -> { @@ -148,7 +133,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter - * @param namedWriteableRegistry the registry of named writeables * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code udpate_by_query} and regular search differ here. */ @@ -156,11 +140,10 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize ) throws IOException { - parseSearchRequest(searchRequest, request, requestContentParser, namedWriteableRegistry, clusterSupportsFeature, setSize, null); + parseSearchRequest(searchRequest, request, requestContentParser, clusterSupportsFeature, setSize, null); } /** @@ -170,8 +153,7 @@ public static void parseSearchRequest( * @param request the rest request to read from * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request} * parameter, will be null when there is no request body to parse - * @param namedWriteableRegistry the registry of named writeables - @param clusterSupportsFeature used to check if certain features are available in this cluster + * @param clusterSupportsFeature used to check if certain features are available in this cluster * @param setSize how the size url parameter is handled. {@code udpate_by_query} and regular search differ here. 
* @param searchUsageHolder the holder of search usage stats */ @@ -179,7 +161,6 @@ public static void parseSearchRequest( SearchRequest searchRequest, RestRequest request, @Nullable XContentParser requestContentParser, - NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature, IntConsumer setSize, @Nullable SearchUsageHolder searchUsageHolder diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 5f24f72d5cc8f..a45730a82dbc2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -97,7 +97,7 @@ public void testFailWithUnknownKey() { ).build(); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("key [unknown_key] is not supported in the metadata section", ex.getMessage()); } @@ -113,7 +113,6 @@ public void testSimpleAddWithCarriageReturn() throws Exception { ).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -137,7 +136,6 @@ public void testDefaultIndicesOptions() throws IOException { ).withParams(Collections.singletonMap("ignore_unavailable", "true")).build(); MultiSearchRequest request = RestMultiSearchAction.parseRequest( restRequest, - null, true, new UsageService().getSearchUsageHolder(), nf -> false @@ -250,7 +248,7 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); IllegalArgumentException expectThrows = expectThrows( IllegalArgumentException.class, - () -> RestMultiSearchAction.parseRequest(restRequest, null, true, new UsageService().getSearchUsageHolder(), nf -> false) + () -> RestMultiSearchAction.parseRequest(restRequest, true, new UsageService().getSearchUsageHolder(), nf -> false) ); assertEquals("The msearch request must be terminated by a newline [\n]", expectThrows.getMessage()); @@ -261,7 +259,6 @@ public void testMsearchTerminatedByNewline() throws Exception { ).build(); MultiSearchRequest msearchRequest = RestMultiSearchAction.parseRequest( restRequestWithNewLine, - null, true, new UsageService().getSearchUsageHolder(), nf -> false diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java index f2a11336c7f4b..15e1d479ddf9a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; @@ -30,16 +29,9 @@ public final class RestMultiSearchActionTests extends RestActionTestCase { final List contentTypeHeader = Collections.singletonList(compatibleMediaType(XContentType.VND_JSON, 
RestApiVersion.V_7)); - private RestMultiSearchAction action; - @Before public void setUpAction() { - action = new RestMultiSearchAction( - Settings.EMPTY, - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestMultiSearchAction action = new RestMultiSearchAction(Settings.EMPTY, new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(MultiSearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(MultiSearchResponse.class)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 5f641ef8fd84f..77cc94c44e151 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -37,7 +36,7 @@ public final class RestSearchActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestSearchAction(new UsageService().getSearchUsageHolder(), mock(NamedWriteableRegistry.class), nf -> false); + action = new RestSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); verifyingClient.setExecuteVerifier((actionType, request) -> mock(SearchResponse.class)); verifyingClient.setExecuteLocallyVerifier((actionType, request) -> mock(SearchResponse.class)); diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java index c551312f68c0b..b719d4ca3bf82 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java @@ -57,7 +57,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), + new RestSubmitAsyncSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestGetAsyncSearchAction(), new RestGetAsyncStatusAction(), new RestDeleteAsyncSearchAction() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java index d98677d456b90..bd09d8f7740a1 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.search; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; 
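/*
 * [Editor's sketch — not part of the patch.] The trimmed parseSearchRequest call shape that
 * this handler (and the fleet search handler below) now uses; the scaffolding names are
 * illustrative, the signature is the one introduced in RestSearchAction above:
 *
 *   IntConsumer setSize = size -> searchRequest.source().size(size);
 *   request.withContentOrSourceParamParserOrNull(
 *       parser -> parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder)
 *   );
 */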
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -37,16 +36,10 @@ public final class RestSubmitAsyncSearchAction extends BaseRestHandler { static final Set RESPONSE_PARAMS = Collections.singleton(TYPED_KEYS_PARAM); private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestSubmitAsyncSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestSubmitAsyncSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -69,15 +62,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli // them as supported. We rely on SubmitAsyncSearchRequest#validate to fail in case they are set. // Note that ccs_minimize_roundtrips is also set this way, which is a supported option. request.withContentOrSourceParamParserOrNull( - parser -> parseSearchRequest( - submit.getSearchRequest(), - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ) + parser -> parseSearchRequest(submit.getSearchRequest(), request, parser, clusterSupportsFeature, setSize, searchUsageHolder) ); if (request.hasParam("wait_for_completion_timeout")) { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java index fe6ed8b57d1e0..cc1d4f4e6270d 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java @@ -8,7 +8,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; @@ -27,19 +26,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.mock; public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { - private RestSubmitAsyncSearchAction action; - @Before public void setUpAction() { - action = new RestSubmitAsyncSearchAction( - new UsageService().getSearchUsageHolder(), - mock(NamedWriteableRegistry.class), - nf -> false - ); + RestSubmitAsyncSearchAction action = new RestSubmitAsyncSearchAction(new UsageService().getSearchUsageHolder(), nf -> false); controller().registerHandler(action); } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index c2e4e2aa2ca98..b16bea7c65b5b 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -367,7 +367,7 @@ public List getRestHandlers( ) { return List.of( new RestGetGlobalCheckpointsAction(), - new RestFleetSearchAction(restController.getSearchUsageHolder(), 
namedWriteableRegistry, clusterSupportsFeature), + new RestFleetSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature), new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature), new RestGetSecretsAction(), new RestPostSecretsAction(), diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index 28cc7c5172631..530b64729a5d1 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -77,7 +77,6 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { final MultiSearchRequest multiSearchRequest = RestMultiSearchAction.parseRequest( request, - namedWriteableRegistry, allowExplicitIndex, searchUsageHolder, clusterSupportsFeature, diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index e1281f4f20a4c..a6c369734f0e3 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; @@ -39,16 +38,10 @@ public class RestFleetSearchAction extends BaseRestHandler { private final SearchUsageHolder searchUsageHolder; - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestFleetSearchAction( - SearchUsageHolder searchUsageHolder, - NamedWriteableRegistry namedWriteableRegistry, - Predicate clusterSupportsFeature - ) { + public RestFleetSearchAction(SearchUsageHolder searchUsageHolder, Predicate clusterSupportsFeature) { this.searchUsageHolder = searchUsageHolder; - this.namedWriteableRegistry = namedWriteableRegistry; this.clusterSupportsFeature = clusterSupportsFeature; } @@ -79,15 +72,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli IntConsumer setSize = size -> searchRequest.source().size(size); request.withContentOrSourceParamParserOrNull(parser -> { - RestSearchAction.parseSearchRequest( - searchRequest, - request, - parser, - namedWriteableRegistry, - clusterSupportsFeature, - setSize, - searchUsageHolder - ); + RestSearchAction.parseSearchRequest(searchRequest, request, parser, clusterSupportsFeature, setSize, searchUsageHolder); String[] stringWaitForCheckpoints = request.paramAsStringArray("wait_for_checkpoints", Strings.EMPTY_ARRAY); final long[] waitForCheckpoints = new long[stringWaitForCheckpoints.length]; for (int i = 0; i < stringWaitForCheckpoints.length; ++i) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java 
index 1748c1be86b78..665548c432ca0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -97,7 +97,7 @@ public List getRestHandlers( Predicate clusterSupportsFeature ) { return Arrays.asList( - new RestRollupSearchAction(namedWriteableRegistry, clusterSupportsFeature), + new RestRollupSearchAction(clusterSupportsFeature), new RestPutRollupJobAction(), new RestStartRollupJobAction(), new RestStopRollupJobAction(), diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index 2e02f1d12fb69..a2e795d07aaf2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -28,11 +27,9 @@ public class RestRollupSearchAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - private final NamedWriteableRegistry namedWriteableRegistry; private final Predicate clusterSupportsFeature; - public RestRollupSearchAction(NamedWriteableRegistry namedWriteableRegistry, Predicate clusterSupportsFeature) { - this.namedWriteableRegistry = namedWriteableRegistry; + public RestRollupSearchAction(Predicate clusterSupportsFeature) { this.clusterSupportsFeature = clusterSupportsFeature; } @@ -54,7 +51,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient searchRequest, restRequest, parser, - namedWriteableRegistry, clusterSupportsFeature, size -> searchRequest.source().size(size) ) From 18be49b327581fa2be70ac792ad441da332741c1 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 5 Apr 2024 11:56:28 +0200 Subject: [PATCH 046/173] [Profiling] Use CancellableTask internally (#107139) With this commit we eagerly cast the task provided to our central transport action to a CancellableTask so we can simplify cancellation checks while the action is being executed. 
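A minimal sketch of the pattern this commit applies, assuming only the real Elasticsearch types Task, CancellableTask and ActionListener (the class and method names below are illustrative, not the actual TransportGetStackTracesAction code; CancellableTask#notifyIfCancelled is the same check used in the diff):

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.tasks.CancellableTask;
    import org.elasticsearch.tasks.Task;

    class EagerCastSketch {

        // Before: each phase re-checked the runtime type at every cancellation point.
        static <T> boolean wasCancelled(Task task, ActionListener<T> listener) {
            return task instanceof CancellableTask c && c.notifyIfCancelled(listener);
        }

        // After: the entry point narrows the type exactly once ...
        static <T> void doExecute(Task task, ActionListener<T> listener) {
            assert task instanceof CancellableTask;
            final CancellableTask cancellable = (CancellableTask) task;
            nextPhase(cancellable, listener);
        }

        // ... and every downstream phase takes CancellableTask directly.
        static <T> void nextPhase(CancellableTask task, ActionListener<T> listener) {
            if (task.notifyIfCancelled(listener)) {
                return; // the listener has already been failed with a cancellation exception
            }
            // continue with the real work
        }
    }

The assert is safe because the action registers its task as cancellable; if that invariant ever broke, the cast would fail fast rather than silently skipping cancellation checks.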
Relates #107037 --- .../TransportGetStackTracesAction.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 0acdc7c37ce09..d7c9e61b73a3a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -149,8 +149,10 @@ public TransportGetStackTracesAction( } @Override - protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionListener submitListener) { + protected void doExecute(Task task, GetStackTracesRequest request, ActionListener submitListener) { licenseChecker.requireSupportedLicense(); + assert task instanceof CancellableTask; + final CancellableTask submitTask = (CancellableTask) task; GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(request); Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), submitTask); if (request.isUserProvidedIndices()) { @@ -161,7 +163,7 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL } private void searchProfilingEvents( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -201,7 +203,7 @@ private void searchProfilingEvents( } private void searchGenericEvents( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -240,7 +242,7 @@ private void searchGenericEvents( } private void searchGenericEventGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -320,7 +322,7 @@ private void searchGenericEventGroupedByStackTrace( } private void searchEventGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -432,7 +434,7 @@ The same stacktraces may come from different hosts (eventually from different da } private ActionListener handleEventsGroupedByStackTrace( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener, @@ -471,12 +473,12 @@ private static long getAggValueAsLong(SearchResponse searchResponse, String fiel } private void retrieveStackTraces( - Task submitTask, + CancellableTask submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener ) { - if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { + if (submitTask.notifyIfCancelled(submitListener)) { return; } List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet()); @@ -554,7 +556,7 @@ static List> sliced(List c, int slices) { private class StackTraceHandler { private final AtomicInteger expectedResponses; - private final Task submitTask; + private final CancellableTask submitTask; private final ClusterState clusterState; private final Client client; private final GetStackTracesResponseBuilder responseBuilder; @@ -568,7 +570,7 @@ private class StackTraceHandler { private final Map hostMetadata; private StackTraceHandler( - Task 
submitTask, + CancellableTask submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -691,7 +693,7 @@ public void mayFinish() { } private void retrieveStackTraceDetails( - Task submitTask, + CancellableTask submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -699,7 +701,7 @@ private void retrieveStackTraceDetails( List executableIds, ActionListener submitListener ) { - if (submitTask instanceof CancellableTask c && c.notifyIfCancelled(submitListener)) { + if (submitTask.notifyIfCancelled(submitListener)) { return; } List stackFrameIndices = resolver.resolve( From ad77d32271dc77860b84ed38507b6903dde24f56 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 5 Apr 2024 12:00:52 +0200 Subject: [PATCH 047/173] ESQL: Fix version test failure on non-SNAPSHOT builds (#107138) Test the snapshot ESQL version separately and take the current build into account. --- .../esql/action/EsqlQueryRequestTests.java | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 44066ff3d091d..6ec1af033f86c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -158,8 +159,13 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testKnownVersionIsValid() throws IOException { + public void testKnownStableVersionIsValid() throws IOException { for (EsqlVersion version : EsqlVersion.values()) { + if (version == EsqlVersion.SNAPSHOT) { + // Not stable, skip. Also avoids breaking the CI as this is invalid for non-SNAPSHOT builds. + continue; + } + String validVersionString = randomBoolean() ? 
version.versionStringWithoutEmoji() : version.toString(); String json = String.format(Locale.ROOT, """ @@ -209,23 +215,27 @@ public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { "query": "ROW x = 1" } """, esqlVersion); - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); + + String errorOnNonSnapshotBuilds = "[version] with value [" + + esqlVersion + + "] only allowed in snapshot builds, latest available version is [" + + EsqlVersion.latestReleased().versionStringWithoutEmoji() + + "]"; + + if (Build.current().isSnapshot()) { + assertNull(request.validate()); + } else { + assertNotNull(request.validate()); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); + } + request.onSnapshotBuild(true); assertNull(request.validate()); request.onSnapshotBuild(false); assertNotNull(request.validate()); - assertThat( - request.validate().getMessage(), - containsString( - "[version] with value [" - + esqlVersion - + "] only allowed in snapshot builds, latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]" - ) - ); + assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") From d9f010abac7ea3aebdbcfc8bf8f68f8fd9ed309c Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Apr 2024 12:48:23 +0200 Subject: [PATCH 048/173] Fix leak in RestVectorTileAction (#107143) Fix obvious SearchResponse leak in RestVectorTileAction. Test only issue fortunately since the response hits are empty. --- .../xpack/vectortile/rest/RestVectorTileAction.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index ba5b97bbcb062..66fbf2e892b56 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -162,10 +162,14 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio searchResponse.getShardFailures(), searchResponse.getClusters() ); - tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory)); - ensureOpen(); - tileBuilder.build().writeTo(bytesOut); - return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes()); + try { + tileBuilder.addLayers(buildMetaLayer(meta, bounds, request, featureFactory)); + ensureOpen(); + tileBuilder.build().writeTo(bytesOut); + return new RestResponse(RestStatus.OK, MIME_TYPE, bytesOut.bytes()); + } finally { + meta.decRef(); + } } } }); From db802bb6cb517db46df8501ca10d1a4c9a962284 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Fri, 5 Apr 2024 13:00:52 +0200 Subject: [PATCH 049/173] Move back InferenceServiceRegistry and ModelRegistry to inference plugin, reverting #105012 (#107141) --- .../inference/InferenceServiceRegistry.java | 62 +++++++----- .../InferenceServiceRegistryImpl.java | 64 ------------ .../inference/ModelRegistry.java | 99 ------------------- .../elasticsearch/node/NodeConstruction.java | 15 --- .../plugins/InferenceRegistryPlugin.java | 22 ----- ...gistryImplIT.java => ModelRegistryIT.java} | 52 +++++----- 
.../xpack/inference/InferencePlugin.java | 31 ++---- .../TransportDeleteInferenceModelAction.java | 2 +- .../TransportGetInferenceModelAction.java | 2 +- .../action/TransportInferenceAction.java | 2 +- .../TransportPutInferenceModelAction.java | 2 +- ...elRegistryImpl.java => ModelRegistry.java} | 82 +++++++++------ ...ImplTests.java => ModelRegistryTests.java} | 34 +++---- 13 files changed, 143 insertions(+), 326 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java delete mode 100644 server/src/main/java/org/elasticsearch/inference/ModelRegistry.java delete mode 100644 server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java rename x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/{ModelRegistryImplIT.java => ModelRegistryIT.java} (86%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/{ModelRegistryImpl.java => ModelRegistry.java} (86%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/{ModelRegistryImplTests.java => ModelRegistryTests.java} (92%) diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java index ce6f1b21b734c..d5973807d9d78 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java @@ -13,41 +13,49 @@ import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class InferenceServiceRegistry implements Closeable { + + private final Map services; + private final List namedWriteables = new ArrayList<>(); + + public InferenceServiceRegistry( + List inferenceServicePlugins, + InferenceServiceExtension.InferenceServiceFactoryContext factoryContext + ) { + // TODO check names are unique + services = inferenceServicePlugins.stream() + .flatMap(r -> r.getInferenceServiceFactories().stream()) + .map(factory -> factory.create(factoryContext)) + .collect(Collectors.toMap(InferenceService::name, Function.identity())); + } -public interface InferenceServiceRegistry extends Closeable { - void init(Client client); - - Map getServices(); - - Optional getService(String serviceName); - - List getNamedWriteables(); - - class NoopInferenceServiceRegistry implements InferenceServiceRegistry { - public NoopInferenceServiceRegistry() {} + public void init(Client client) { + services.values().forEach(s -> s.init(client)); + } - @Override - public void init(Client client) {} + public Map getServices() { + return services; + } - @Override - public Map getServices() { - return Map.of(); - } + public Optional getService(String serviceName) { + return Optional.ofNullable(services.get(serviceName)); + } - @Override - public Optional getService(String serviceName) { - return Optional.empty(); - } + public List getNamedWriteables() { + return namedWriteables; + } - @Override - public List getNamedWriteables() { - return List.of(); + @Override + public void close() throws IOException { + for (var service : services.values()) { + service.close(); } - - @Override - public void close() throws IOException {} } } diff --git 
a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java deleted file mode 100644 index f0a990ded98ce..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistryImpl.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; - -public class InferenceServiceRegistryImpl implements InferenceServiceRegistry { - - private final Map services; - private final List namedWriteables = new ArrayList<>(); - - public InferenceServiceRegistryImpl( - List inferenceServicePlugins, - InferenceServiceExtension.InferenceServiceFactoryContext factoryContext - ) { - // TODO check names are unique - services = inferenceServicePlugins.stream() - .flatMap(r -> r.getInferenceServiceFactories().stream()) - .map(factory -> factory.create(factoryContext)) - .collect(Collectors.toMap(InferenceService::name, Function.identity())); - } - - @Override - public void init(Client client) { - services.values().forEach(s -> s.init(client)); - } - - @Override - public Map getServices() { - return services; - } - - @Override - public Optional getService(String serviceName) { - return Optional.ofNullable(services.get(serviceName)); - } - - @Override - public List getNamedWriteables() { - return namedWriteables; - } - - @Override - public void close() throws IOException { - for (var service : services.values()) { - service.close(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java b/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java deleted file mode 100644 index fa90d5ba6f756..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/ModelRegistry.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.action.ActionListener; - -import java.util.List; -import java.util.Map; - -public interface ModelRegistry { - - /** - * Get a model. - * Secret settings are not included - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModel(String inferenceEntityId, ActionListener listener); - - /** - * Get a model with its secret settings - * @param inferenceEntityId Model to get - * @param listener Model listener - */ - void getModelWithSecrets(String inferenceEntityId, ActionListener listener); - - /** - * Get all models of a particular task type. 
- * Secret settings are not included - * @param taskType The task type - * @param listener Models listener - */ - void getModelsByTaskType(TaskType taskType, ActionListener> listener); - - /** - * Get all models. - * Secret settings are not included - * @param listener Models listener - */ - void getAllModels(ActionListener> listener); - - void storeModel(Model model, ActionListener listener); - - void deleteModel(String modelId, ActionListener listener); - - /** - * Semi parsed model where inference entity id, task type and service - * are known but the settings are not parsed. - */ - record UnparsedModel( - String inferenceEntityId, - TaskType taskType, - String service, - Map settings, - Map secrets - ) {} - - class NoopModelRegistry implements ModelRegistry { - @Override - public void getModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void getAllModels(ActionListener> listener) { - listener.onResponse(List.of()); - } - - @Override - public void storeModel(Model model, ActionListener listener) { - fail(listener); - } - - @Override - public void deleteModel(String modelId, ActionListener listener) { - fail(listener); - } - - @Override - public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { - fail(listener); - } - - private static void fail(ActionListener listener) { - listener.onFailure(new IllegalArgumentException("No model registry configured")); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 15ebe2752451d..5bf19c4b87157 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -127,8 +127,6 @@ import org.elasticsearch.indices.recovery.plan.PeerOnlyRecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; @@ -147,7 +145,6 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.HealthPlugin; -import org.elasticsearch.plugins.InferenceRegistryPlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MetadataUpgrader; @@ -1114,18 +1111,6 @@ record PluginServiceInstances( ); } - // Register noop versions of inference services if Inference plugin is not available - Optional inferenceRegistryPlugin = getSinglePlugin(InferenceRegistryPlugin.class); - modules.bindToInstance( - InferenceServiceRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getInferenceServiceRegistry) - .orElse(new InferenceServiceRegistry.NoopInferenceServiceRegistry()) - ); - modules.bindToInstance( - ModelRegistry.class, - inferenceRegistryPlugin.map(InferenceRegistryPlugin::getModelRegistry).orElse(new ModelRegistry.NoopModelRegistry()) - ); - injector = modules.createInjector(); postInjection(clusterModule, actionModule, clusterService, transportService, featureService); 
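The removal above relies on the standard plugin extension point: anything a plugin returns from createComponents is registered with the node's injector and can be constructor-injected into that plugin's transport actions, so no server-side interface with a noop fallback is needed. A hedged sketch of that mechanism, where MyPlugin and MyComponent are made-up stand-ins for InferencePlugin and its registries:

    import java.util.Collection;
    import java.util.List;
    import org.elasticsearch.plugins.Plugin;

    public class MyPlugin extends Plugin {

        // Stand-in for a plugin-owned service such as ModelRegistry.
        public static class MyComponent {}

        @Override
        public Collection<?> createComponents(PluginServices services) {
            // Components returned here are bound in the injector, making them
            // available for injection into this plugin's actions.
            return List.of(new MyComponent());
        }
    }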
diff --git a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java deleted file mode 100644 index 696c3a067dad1..0000000000000 --- a/server/src/main/java/org/elasticsearch/plugins/InferenceRegistryPlugin.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.plugins; - -import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; - -/** - * Plugins that provide inference services should implement this interface. - * There should be a single one in the classpath, as we currently support a single instance for ModelRegistry / InfereceServiceRegistry. - */ -public interface InferenceRegistryPlugin { - InferenceServiceRegistry getInferenceServiceRegistry(); - - ModelRegistry getModelRegistry(); -} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java similarity index 86% rename from x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java rename to x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index ccda986a8d280..0f23e0b33d774 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.InferencePlugin; -import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elser.ElserInternalModel; import org.elasticsearch.xpack.inference.services.elser.ElserInternalService; import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettingsTests; @@ -55,13 +55,13 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; -public class ModelRegistryImplIT extends ESSingleNodeTestCase { +public class ModelRegistryIT extends ESSingleNodeTestCase { - private ModelRegistryImpl ModelRegistryImpl; + private ModelRegistry modelRegistry; @Before public void createComponents() { - ModelRegistryImpl = new ModelRegistryImpl(client()); + modelRegistry = new ModelRegistry(client()); } @Override @@ -75,7 +75,7 @@ public void testStoreModel() throws Exception { AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); assertThat(storeModelHolder.get(), is(true)); 
assertThat(exceptionHolder.get(), is(nullValue())); @@ -87,7 +87,7 @@ public void testStoreModelWithUnknownFields() throws Exception { AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), storeModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), storeModelHolder, exceptionHolder); assertNull(storeModelHolder.get()); assertNotNull(exceptionHolder.get()); @@ -106,12 +106,12 @@ public void testGetModel() throws Exception { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); // now get the model - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); @@ -133,13 +133,13 @@ public void testStoreModelFailsWhenModelExists() throws Exception { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertThat(exceptionHolder.get(), is(nullValue())); putModelHolder.set(false); // an model with the same id exists - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(false)); assertThat(exceptionHolder.get(), not(nullValue())); assertThat( @@ -154,20 +154,20 @@ public void testDeleteModel() throws Exception { Model model = buildElserModelConfig(id, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); } AtomicReference deleteResponseHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.deleteModel("model1", listener), deleteResponseHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertTrue(deleteResponseHolder.get()); // get should fail deleteResponseHolder.set(false); - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets("model1", listener), modelHolder, 
exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), not(nullValue())); assertFalse(deleteResponseHolder.get()); @@ -187,13 +187,13 @@ public void testGetModelsByTaskType() throws InterruptedException { AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); } AtomicReference exceptionHolder = new AtomicReference<>(); - AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(3)); var sparseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.SPARSE_EMBEDDING) @@ -204,7 +204,7 @@ public void testGetModelsByTaskType() throws InterruptedException { assertThat(m.secrets().keySet(), empty()); }); - blockingCall(listener -> ModelRegistryImpl.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(2)); var denseIds = sparseAndTextEmbeddingModels.stream() .filter(m -> m.getConfigurations().getTaskType() == TaskType.TEXT_EMBEDDING) @@ -228,13 +228,13 @@ public void testGetAllModels() throws InterruptedException { var model = createModel(randomAlphaOfLength(5), randomFrom(TaskType.values()), service); createdModels.add(model); - blockingCall(listener -> ModelRegistryImpl.storeModel(model, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); } - AtomicReference> modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getAllModels(listener), modelHolder, exceptionHolder); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); assertThat(modelHolder.get(), hasSize(modelCount)); var getAllModels = modelHolder.get(); @@ -258,18 +258,18 @@ public void testGetModelWithSecrets() throws InterruptedException { AtomicReference exceptionHolder = new AtomicReference<>(); var modelWithSecrets = createModelWithSecrets(inferenceEntityId, randomFrom(TaskType.values()), service, secret); - blockingCall(listener -> ModelRegistryImpl.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); assertThat(putModelHolder.get(), is(true)); assertNull(exceptionHolder.get()); - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> ModelRegistryImpl.getModelWithSecrets(inferenceEntityId, listener), 
modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), hasSize(1)); var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings"); assertThat(secretSettings.get("secret"), equalTo(secret)); // get model without secrets - blockingCall(listener -> ModelRegistryImpl.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); + blockingCall(listener -> modelRegistry.getModel(inferenceEntityId, listener), modelHolder, exceptionHolder); assertThat(modelHolder.get().secrets().keySet(), empty()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index c598a58d014f9..c707f99e7eb65 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -24,11 +24,8 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.InferenceServiceRegistryImpl; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; -import org.elasticsearch.plugins.InferenceRegistryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; @@ -53,7 +50,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestInferenceAction; @@ -74,7 +71,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, InferenceRegistryPlugin { +public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin { /** * When this setting is true the verification check that @@ -99,8 +96,6 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); - private final SetOnce modelRegistry = new SetOnce<>(); - private List inferenceServiceExtensions; public InferencePlugin(Settings settings) { @@ -151,7 +146,7 @@ public Collection createComponents(PluginServices services) { ); httpFactory.set(httpRequestSenderFactory); - ModelRegistry modelReg = new ModelRegistryImpl(services.client()); + ModelRegistry modelRegistry = new ModelRegistry(services.client()); if (inferenceServiceExtensions == null) { inferenceServiceExtensions = new ArrayList<>(); @@ -162,13 +157,11 @@ public Collection 
createComponents(PluginServices services) { var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext(services.client()); // This must be done after the HttpRequestSenderFactory is created so that the services can get the // reference correctly - var inferenceRegistry = new InferenceServiceRegistryImpl(inferenceServices, factoryContext); - inferenceRegistry.init(services.client()); - inferenceServiceRegistry.set(inferenceRegistry); - modelRegistry.set(modelReg); + var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); + registry.init(services.client()); + inferenceServiceRegistry.set(registry); - // Don't return components as they will be registered using InferenceRegistryPlugin methods to retrieve them - return List.of(); + return List.of(modelRegistry, registry); } @Override @@ -266,14 +259,4 @@ public void close() { IOUtils.closeWhileHandlingException(inferenceServiceRegistry.get(), throttlerToClose); } - - @Override - public InferenceServiceRegistry getInferenceServiceRegistry() { - return inferenceServiceRegistry.get(); - } - - @Override - public ModelRegistry getModelRegistry() { - return modelRegistry.get(); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java index ad6042581f264..b55e2e6f8ebed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -23,12 +23,12 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 0f7e48c4f8140..2de1aecea118c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -25,6 +24,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.inference.InferencePlugin; +import 
org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index a480763f33c47..edaf42d7f1fc6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -16,11 +16,11 @@ import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportInferenceAction extends HandledTransportAction { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 556acfd89c9c6..85e8481f749d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -44,6 +43,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; import java.io.IOException; import java.util.Map; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java similarity index 86% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 40921cd38f181..0f3aa5b82b189 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImpl.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -32,7 +31,6 @@ import 
org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelRegistry; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -57,21 +55,49 @@ import static org.elasticsearch.core.Strings.format; -public class ModelRegistryImpl implements ModelRegistry { +public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} + /** + * Semi parsed model where inference entity id, task type and service + * are known but the settings are not parsed. + */ + public record UnparsedModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map settings, + Map secrets + ) { + + public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) { + if (modelConfigMap.config() == null) { + throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); + } + String inferenceEntityId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + TaskType taskType = TaskType.fromString(taskTypeStr); + + return new UnparsedModel(inferenceEntityId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); + } + } + private static final String TASK_TYPE_FIELD = "task_type"; private static final String MODEL_ID_FIELD = "model_id"; - private static final Logger logger = LogManager.getLogger(ModelRegistryImpl.class); + private static final Logger logger = LogManager.getLogger(ModelRegistry.class); private final OriginSettingClient client; - @Inject - public ModelRegistryImpl(Client client) { + public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); } - @Override + /** + * Get a model with its secret settings + * @param inferenceEntityId Model to get + * @param listener Model listener + */ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets @@ -80,7 +106,7 @@ public void getModelWithSecrets(String inferenceEntityId, ActionListener listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // There should be a hit for the configurations and secrets @@ -101,7 +132,7 @@ public void getModel(String inferenceEntityId, ActionListener lis return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); assert modelConfigs.size() == 1; delegate.onResponse(modelConfigs.get(0)); }); @@ -116,7 +147,12 @@ public void getModel(String inferenceEntityId, ActionListener lis client.search(modelSearch, searchListener); } - @Override + /** + * Get all models of a particular task type. 
+ * Secret settings are not included + * @param taskType The task type + * @param listener Models listener + */ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // Not an error if no models of this task_type @@ -125,7 +161,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { ActionListener searchListener = listener.delegateFailureAndWrap((delegate, searchResponse) -> { // Not an error if no models of this task_type @@ -150,7 +190,7 @@ public void getAllModels(ActionListener> listener) { return; } - var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(ModelRegistryImpl::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); delegate.onResponse(modelConfigs); }); @@ -217,7 +257,6 @@ private ModelConfigMap createModelConfigMap(SearchHits hits, String inferenceEnt ); } - @Override public void storeModel(Model model, ActionListener listener) { ActionListener bulkResponseActionListener = getStoreModelListener(model, listener); @@ -314,7 +353,6 @@ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkRes return null; } - @Override public void deleteModel(String inferenceEntityId, ActionListener listener) { DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false); request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN); @@ -339,16 +377,4 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T private QueryBuilder documentIdQuery(String inferenceEntityId) { return QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(Model.documentId(inferenceEntityId))); } - - private static UnparsedModel unparsedModelFromMap(ModelRegistryImpl.ModelConfigMap modelConfigMap) { - if (modelConfigMap.config() == null) { - throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); - } - String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); - String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); - String taskTypeStr = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); - TaskType taskType = TaskType.fromString(taskTypeStr); - - return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java similarity index 92% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 10fd4f09e86ac..768f053295d13 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -46,7 +46,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class ModelRegistryImplTests extends ESTestCase { +public class 
ModelRegistryTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); @@ -66,9 +66,9 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() var client = mockClient(); mockClientExecuteSearch(client, mockSearchResponse(SearchHits.EMPTY)); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT)); @@ -80,9 +80,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn var unknownIndexHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", "unknown_index")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT)); @@ -97,9 +97,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -114,9 +114,9 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); @@ -148,9 +148,9 @@ public void testGetModelWithSecrets() { mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModelWithSecrets("1", listener); var modelConfig = listener.actionGet(TIMEOUT); @@ -177,9 +177,9 @@ public void testGetModelNoSecrets() { mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); registry.getModel("1", listener); registry.getModel("1", listener); @@ -202,7 +202,7 @@ public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new 
ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -219,7 +219,7 @@ public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -250,7 +250,7 @@ public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVe mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); @@ -276,7 +276,7 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { mockClientExecuteBulk(client, bulkResponse); var model = TestModel.createRandomInstance(); - var registry = new ModelRegistryImpl(client); + var registry = new ModelRegistry(client); var listener = new PlainActionFuture(); registry.storeModel(model, listener); From 173900f89e0e4eabbacb29b76985d82b84af0708 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 12:10:03 +0100 Subject: [PATCH 050/173] [ML] Update last usages of model_id to inference_id (#107133) --- docs/reference/inference/put-inference.asciidoc | 2 +- .../xpack/core/inference/action/PutInferenceModelAction.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 87a865b9487e5..6df1993175a0d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -18,7 +18,7 @@ or if you want to use non-NLP models, use the <>. 
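This commit is a pure rename: the docs now say inference_id where they previously said model_id, and the request validation shown below reports the new field name in its error message. For readers without the surrounding class, here is a rough standalone approximation of what that validation enforces; the regex and the message wording are my reading of the ML-style ID rules, not the exact MlStrings/Messages implementation:

```java
import java.util.regex.Pattern;

// Approximation of the ML-style ID rule: lowercase alphanumerics plus hyphens
// and underscores, starting and ending with an alphanumeric character. The
// real check is MlStrings.isValidId; treat this as illustrative only.
final class InferenceIdValidation {
    private static final Pattern VALID_ID = Pattern.compile("[a-z0-9](?:[a-z0-9_\\-]*[a-z0-9])?");

    static void validate(String inferenceEntityId) {
        if (VALID_ID.matcher(inferenceEntityId).matches() == false) {
            // After this change the error names the "inference_id" field rather than "model_id"
            throw new IllegalArgumentException(
                "Invalid inference_id; '" + inferenceEntityId + "' can contain lowercase alphanumeric "
                    + "(a-z and 0-9), hyphens or underscores; must start and end with alphanumeric"
            );
        }
    }

    public static void main(String[] args) {
        validate("my-elser-endpoint"); // passes
        validate("My Endpoint!");      // throws, and the message names inference_id
    }
}
```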
[[put-inference-api-request]] ==== {api-request-title} -`PUT /_inference//` +`PUT /_inference//` [discrete] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index 5cc1c98c6d89b..4617d1f6bccaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { public ActionRequestValidationException validate() { ActionRequestValidationException validationException = new ActionRequestValidationException(); if (MlStrings.isValidId(this.inferenceEntityId) == false) { - validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.inferenceEntityId)); + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "inference_id", this.inferenceEntityId)); } if (validationException.validationErrors().isEmpty() == false) { From 040eb1e5d68e35a9c58c237329fcfb406b177438 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 12:56:55 +0100 Subject: [PATCH 051/173] [Ml] Mute frequent items failure (#107151) --- .../resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml index 4a88762ddb9ea..53f07b35482ed 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml @@ -433,6 +433,9 @@ setup: --- "Test frequent item sets unsupported types": - do: + skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/ search: index: store From 8716188b15189e60b2b115ba67177a5ce0b9bffa Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 5 Apr 2024 13:36:08 +0100 Subject: [PATCH 052/173] Mute Freq Items Test frequent item sets unsupported types (#107153) For https://github.com/elastic/elasticsearch/issues/106215, fixing what was not done properly in #107151 --- .../rest-api-spec/test/ml/frequent_item_sets_agg.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml index 53f07b35482ed..db41e0d0efaa1 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml @@ -432,10 +432,11 @@ setup: --- "Test frequent item sets unsupported types": + + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" - do: - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/ search: index: store From 
ee667c40d7a096a99d0c677d9b89e494f76f1158 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 5 Apr 2024 14:36:37 +0200 Subject: [PATCH 053/173] [Transform] Extract common test code to TransformCommonRestTestCase class (#107103) --- .../plugin/transform/qa/common/build.gradle | 8 ++ .../common/TransformCommonRestTestCase.java | 127 ++++++++++++++++++ .../qa/multi-node-tests/build.gradle | 1 + .../xpack/transform/integration/LatestIT.java | 2 +- .../transform/integration/TransformIT.java | 70 +--------- .../integration/TransformRestTestCase.java | 58 +------- .../TransformUsingSearchRuntimeFieldsIT.java | 4 +- .../continuous/TransformContinuousIT.java | 2 +- .../qa/single-node-tests/build.gradle | 1 + .../integration/TransformRestTestCase.java | 86 +----------- .../integration/TransformRobustnessIT.java | 6 + 11 files changed, 157 insertions(+), 208 deletions(-) create mode 100644 x-pack/plugin/transform/qa/common/build.gradle create mode 100644 x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java diff --git a/x-pack/plugin/transform/qa/common/build.gradle b/x-pack/plugin/transform/qa/common/build.gradle new file mode 100644 index 0000000000000..9e7abfa2f977e --- /dev/null +++ b/x-pack/plugin/transform/qa/common/build.gradle @@ -0,0 +1,8 @@ +apply plugin: 'elasticsearch.internal-java-rest-test' + +dependencies { + api project(':libs:elasticsearch-x-content') + api project(':test:framework') + api project(xpackModule('core')) +} + diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java new file mode 100644 index 0000000000000..486dd7c581032 --- /dev/null +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.integration.common; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; + +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +public abstract class TransformCommonRestTestCase extends ESRestTestCase { + + protected static final String TRANSFORM_ENDPOINT = TransformField.REST_BASE_PATH_TRANSFORMS; + protected static final String AUTH_KEY = "Authorization"; + protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; + + protected static String getTransformEndpoint() { + return TRANSFORM_ENDPOINT; + } + + /** + * Returns the list of transform tasks as reported by the _tasks API. 
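(The generic type parameters were stripped from the Java in this patch's rendering, which makes the traversal below hard to read. The same logic, fully typed and runnable as a standalone sketch with a worked response shape:)

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// The _tasks response is nodes -> per-node info -> tasks, keyed by task id;
// the helper flattens that into the list of transform task ids.
final class TransformTasksParsing {
    @SuppressWarnings("unchecked")
    static List<String> taskIds(Map<String, Object> tasksResponse) {
        Map<String, Object> nodes = (Map<String, Object>) tasksResponse.get("nodes");
        if (nodes == null) {
            return List.of();
        }
        List<String> foundTasks = new ArrayList<>();
        for (Object nodeValue : nodes.values()) {
            Map<String, Object> nodeInfo = (Map<String, Object>) nodeValue;
            Map<String, Object> tasks = (Map<String, Object>) nodeInfo.get("tasks");
            if (tasks != null) {
                foundTasks.addAll(tasks.keySet());
            }
        }
        return foundTasks;
    }

    public static void main(String[] args) {
        // Hypothetical response: one node ("node-0") running one task ("node-0:42").
        Map<String, Object> response = Map.of(
            "nodes", Map.of("node-0", Map.of("tasks", Map.of("node-0:42", Map.of())))
        );
        System.out.println(taskIds(response)); // [node-0:42]
    }
}
```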
+ */ + @SuppressWarnings("unchecked") + protected List getTransformTasks() throws IOException { + Request tasksRequest = new Request("GET", "/_tasks"); + tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); + Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); + + Map nodes = (Map) tasksResponse.get("nodes"); + if (nodes == null) { + return List.of(); + } + + List foundTasks = new ArrayList<>(); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map tasks = (Map) nodeInfo.get("tasks"); + if (tasks != null) { + foundTasks.addAll(tasks.keySet()); + } + } + return foundTasks; + } + + /** + * Returns the list of transform tasks for the given transform as reported by the _cluster/state API. + */ + @SuppressWarnings("unchecked") + protected List getTransformTasksFromClusterState(String transformId) throws IOException { + Request request = new Request("GET", "_cluster/state"); + Map response = entityAsMap(adminClient().performRequest(request)); + + List> tasks = (List>) XContentMapValues.extractValue( + response, + "metadata", + "persistent_tasks", + "tasks" + ); + + return tasks.stream().map(t -> (String) t.get("id")).filter(transformId::equals).toList(); + } + + @SuppressWarnings("unchecked") + protected void logAudits() throws Exception { + logger.info("writing audit messages to the log"); + Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); + searchRequest.setJsonEntity(""" + { + "size": 100, + "sort": [ { "timestamp": { "order": "asc" } } ] + }"""); + + assertBusy(() -> { + try { + refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); + Response searchResponse = client().performRequest(searchRequest); + + Map searchResult = entityAsMap(searchResponse); + List> searchHits = (List>) XContentMapValues.extractValue( + "hits.hits", + searchResult + ); + + for (Map hit : searchHits) { + Map source = (Map) XContentMapValues.extractValue("_source", hit); + String level = (String) source.getOrDefault("level", "info"); + logger.log( + Level.getLevel(level.toUpperCase(Locale.ROOT)), + "Transform audit: [{}] [{}] [{}] [{}]", + Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), + source.getOrDefault("transform_id", "n/a"), + source.getOrDefault("message", "n/a"), + source.getOrDefault("node_name", "n/a") + ); + } + } catch (ResponseException e) { + // see gh#54810, wrap temporary 503's as assertion error for retry + if (e.getResponse().getStatusLine().getStatusCode() != 503) { + throw e; + } + throw new AssertionError("Failed to retrieve audit logs", e); + } + }, 5, TimeUnit.SECONDS); + } + + protected void refreshIndex(String index) throws IOException { + Request refreshRequest = new Request("POST", index + "/_refresh"); + assertOK(adminClient().performRequest(refreshRequest)); + } +} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle index aab0d16e54f5c..32bb44850df6b 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/multi-node-tests/build.gradle @@ -3,6 +3,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) + javaRestTestImplementation project(path: xpackModule('transform:qa:common')) } // location for keys and certificates diff --git 
a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java index 07b6bc9bd7770..27695eb080b0a 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/LatestIT.java @@ -126,7 +126,7 @@ public void testLatest() throws Exception { waitUntilCheckpoint(transformConfig.getId(), 1L); stopTransform(transformConfig.getId()); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); var mappings = getIndexMapping(destIndexName, RequestOptions.DEFAULT); assertThat( (Map) XContentMapValues.extractValue(destIndexName + ".mappings", mappings), diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 538479c33b084..e7d54028caa20 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; -import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SyncConfig; @@ -37,9 +36,7 @@ import java.io.IOException; import java.time.Instant; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -247,23 +244,23 @@ public void testTransformLifecycleInALoop() throws Exception { // Create the continuous transform putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); - assertThatTransformTaskDoesNotExist(transformId); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId, RequestOptions.DEFAULT); // There is 1 transform task after start assertThat(getTransformTasks(), hasSize(1)); - assertThatTransformTaskExists(transformId); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); Thread.sleep(sleepAfterStartMillis); // There should still be 1 transform task as the transform is continuous assertThat(getTransformTasks(), hasSize(1)); - assertThatTransformTaskExists(transformId); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // Stop the transform with force set randomly stopTransform(transformId, true, null, false, force); // After the transform is stopped, there should be no transform task left assertThat(getTransformTasks(), is(empty())); - assertThatTransformTaskDoesNotExist(transformId); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the 
transform deleteTransform(transformId); @@ -303,63 +300,6 @@ private String createConfig(String transformId, String sourceIndex, String destI return Strings.toString(config); } - /** - * Returns the list of transform tasks as reported by _tasks API. - */ - @SuppressWarnings("unchecked") - protected List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - final Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Map.Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } - - /** - * Verifies that the given transform task exists in cluster state. - */ - private void assertThatTransformTaskExists(String transformId) throws IOException { - assertThatTransformTaskCountIsEqualTo(transformId, 1); - } - - /** - * Verifies that the given transform task does not exist in cluster state. - */ - private void assertThatTransformTaskDoesNotExist(String transformId) throws IOException { - assertThatTransformTaskCountIsEqualTo(transformId, 0); - } - - /** - * Verifies that the number of transform tasks in cluster state for the given transform is as expected. - */ - @SuppressWarnings("unchecked") - private void assertThatTransformTaskCountIsEqualTo(String transformId, int expectedCount) throws IOException { - Request request = new Request("GET", "_cluster/state"); - Map response = entityAsMap(adminClient().performRequest(request)); - - List> tasks = (List>) XContentMapValues.extractValue( - response, - "metadata", - "persistent_tasks", - "tasks" - ); - - assertThat("Tasks were: " + tasks, tasks.stream().filter(t -> transformId.equals(t.get("id"))).toList(), hasSize(expectedCount)); - } - public void testContinuousTransformUpdate() throws Exception { String indexName = "continuous-reviews-update"; createReviewsIndex(indexName, 10, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow); @@ -447,7 +387,7 @@ public void testContinuousTransformUpdate() throws Exception { assertOK(searchResponse); var responseMap = entityAsMap(searchResponse); assertThat((Integer) XContentMapValues.extractValue("hits.total.value", responseMap), greaterThan(0)); - refreshIndex(dest, RequestOptions.DEFAULT); + refreshIndex(dest); }, 30, TimeUnit.SECONDS); stopTransform(config.getId()); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 6e13e936f5532..eb1a1258d5a96 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -10,7 +10,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.logging.log4j.Level; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; 
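The TransformIT changes above converge on one pattern: at every lifecycle step, assert the _tasks API view and the cluster-state persistent-task view together instead of going through the removed assertThatTransformTask* wrappers. A fragment of that pattern, assuming the helpers from TransformCommonRestTestCase and a prepared transform `config` (hypothetical here):

```java
public void testTaskViewsStayInSync() throws Exception {
    String transformId = "task-view-sync";

    // Before start: no task in either view
    putTransform(transformId, config, RequestOptions.DEFAULT);
    assertThat(getTransformTasks(), is(empty()));
    assertThat(getTransformTasksFromClusterState(transformId), is(empty()));

    // Running: exactly one task in both views
    startTransform(transformId, RequestOptions.DEFAULT);
    assertThat(getTransformTasks(), hasSize(1));
    assertThat(getTransformTasksFromClusterState(transformId), hasSize(1));

    // Stopped: both views are empty again
    stopTransform(transformId);
    assertThat(getTransformTasks(), is(empty()));
    assertThat(getTransformTasksFromClusterState(transformId), is(empty()));
}
```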
import org.elasticsearch.client.Response; @@ -27,7 +26,6 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -40,22 +38,20 @@ import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; +import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.time.Instant; import java.time.ZoneId; import java.util.Base64; import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -67,9 +63,8 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; -public abstract class TransformRestTestCase extends ESRestTestCase { +public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static String TRANSFORM_ENDPOINT = "/_transform/"; protected static final String AUTH_KEY = "Authorization"; protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; @@ -81,49 +76,6 @@ protected void cleanUp() throws Exception { waitForPendingTasks(); } - @SuppressWarnings("unchecked") - private void logAudits() throws Exception { - logger.info("writing audit messages to the log"); - Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); - searchRequest.setJsonEntity(""" - { - "size": 100, - "sort": [ { "timestamp": { "order": "asc" } } ] - }"""); - - assertBusy(() -> { - try { - refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, RequestOptions.DEFAULT); - Response searchResponse = client().performRequest(searchRequest); - - Map searchResult = entityAsMap(searchResponse); - List> searchHits = (List>) XContentMapValues.extractValue( - "hits.hits", - searchResult - ); - - for (Map hit : searchHits) { - Map source = (Map) XContentMapValues.extractValue("_source", hit); - String level = (String) source.getOrDefault("level", "info"); - logger.log( - Level.getLevel(level.toUpperCase(Locale.ROOT)), - "Transform audit: [{}] [{}] [{}] [{}]", - Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), - source.getOrDefault("transform_id", "n/a"), - source.getOrDefault("message", "n/a"), - source.getOrDefault("node_name", "n/a") - ); - } - } catch (ResponseException e) { - // see gh#54810, wrap temporary 503's as assertion error for retry - if (e.getResponse().getStatusLine().getStatusCode() != 503) { - throw e; - } - throw 
new AssertionError("Failed to retrieve audit logs", e); - } - }, 5, TimeUnit.SECONDS); - } - protected void cleanUpTransforms() throws IOException { for (String id : createdTransformIds) { try { @@ -140,12 +92,6 @@ protected void cleanUpTransforms() throws IOException { createdTransformIds.clear(); } - protected void refreshIndex(String index, RequestOptions options) throws IOException { - var r = new Request("POST", index + "/_refresh"); - r.setOptions(options); - assertOK(adminClient().performRequest(r)); - } - protected Map getIndexMapping(String index, RequestOptions options) throws IOException { var r = new Request("GET", "/" + index + "/_mapping"); r.setOptions(options); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java index 2e509bedbce39..d17d9dbd20ffd 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformUsingSearchRuntimeFieldsIT.java @@ -138,7 +138,7 @@ public void testPivotTransform() throws Exception { stopTransform(config.getId()); assertBusy(() -> { assertEquals("stopped", getTransformState(config.getId())); }); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); // Verify destination index mappings var mappings = (Map) XContentMapValues.extractValue( destIndexName + ".mappings", @@ -235,7 +235,7 @@ public void testLatestTransform() throws Exception { stopTransform(configWithRuntimeFields.getId()); assertBusy(() -> { assertEquals("stopped", getTransformState(configWithRuntimeFields.getId())); }); - refreshIndex(destIndexName, RequestOptions.DEFAULT); + refreshIndex(destIndexName); // Verify destination index mappings var destIndexMapping = getIndexMapping(destIndexName, RequestOptions.DEFAULT); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java index 5eac2bd2ebdf6..69c0e12ca4b55 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/continuous/TransformContinuousIT.java @@ -254,7 +254,7 @@ public void testContinuousEvents() throws Exception { source.append("\r\n"); doBulk(source.toString(), false); } - refreshIndex(sourceIndexName, RequestOptions.DEFAULT); + refreshIndex(sourceIndexName); // start all transforms, wait until the processed all data and stop them startTransforms(); diff --git a/x-pack/plugin/transform/qa/single-node-tests/build.gradle b/x-pack/plugin/transform/qa/single-node-tests/build.gradle index d4f84ecb37c9c..7eeb8c97d1ae4 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/transform/qa/single-node-tests/build.gradle @@ -4,6 +4,7 @@ apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { 
javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: xpackModule('transform')) + javaRestTestImplementation project(path: xpackModule('transform:qa:common')) } testClusters.configureEach { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 7c74e918a039f..09fbea29d4b15 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -9,33 +9,27 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.logging.log4j.Level; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.DestAlias; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; -import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; +import org.elasticsearch.xpack.transform.integration.common.TransformCommonRestTestCase; import org.junit.After; import org.junit.AfterClass; import java.io.IOException; -import java.time.Instant; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -44,10 +38,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public abstract class TransformRestTestCase extends ESRestTestCase { +public abstract class TransformRestTestCase extends TransformCommonRestTestCase { protected static final String TEST_PASSWORD = "x-pack-test-password"; - private static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; protected static final SecureString TEST_PASSWORD_SECURE_STRING = new SecureString(TEST_PASSWORD.toCharArray()); private static final String BASIC_AUTH_VALUE_SUPER_USER = basicAuthHeaderValue("x_pack_rest_user", TEST_PASSWORD_SECURE_STRING); @@ -538,7 +531,7 @@ protected Request createRequestWithSecondaryAuth( RequestOptions.Builder options = request.getOptions().toBuilder(); if (authHeader != null) { - options.addHeader("Authorization", authHeader); + options.addHeader(AUTH_KEY, authHeader); } if (secondaryAuthHeader != null) { options.addHeader(SECONDARY_AUTH_KEY, secondaryAuthHeader); @@ -563,10 +556,6 @@ void waitForTransformCheckpoint(String transformId, long checkpoint) throws Exce }, 30, TimeUnit.SECONDS); } - void refreshIndex(String index) throws IOException { - 
assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); - } - @SuppressWarnings("unchecked") protected static List> getTransforms(List> expectedErrors) throws IOException { Request request = new Request("GET", getTransformEndpoint() + "_all"); @@ -688,73 +677,4 @@ protected void assertOneCount(String query, String field, int expected) throws I int actual = (Integer) ((List) XContentMapValues.extractValue(field, searchResult)).get(0); assertEquals(expected, actual); } - - protected static String getTransformEndpoint() { - return TransformField.REST_BASE_PATH_TRANSFORMS; - } - - @SuppressWarnings("unchecked") - private void logAudits() throws Exception { - logger.info("writing audit messages to the log"); - Request searchRequest = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search?ignore_unavailable=true"); - searchRequest.setJsonEntity(""" - { - "size": 100, - "sort": [ { "timestamp": { "order": "asc" } } ] - }"""); - - assertBusy(() -> { - try { - refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); - Response searchResponse = client().performRequest(searchRequest); - - Map searchResult = entityAsMap(searchResponse); - List> searchHits = (List>) XContentMapValues.extractValue( - "hits.hits", - searchResult - ); - - for (Map hit : searchHits) { - Map source = (Map) XContentMapValues.extractValue("_source", hit); - String level = (String) source.getOrDefault("level", "info"); - logger.log( - Level.getLevel(level.toUpperCase(Locale.ROOT)), - "Transform audit: [{}] [{}] [{}] [{}]", - Instant.ofEpochMilli((long) source.getOrDefault("timestamp", 0)), - source.getOrDefault("transform_id", "n/a"), - source.getOrDefault("message", "n/a"), - source.getOrDefault("node_name", "n/a") - ); - } - } catch (ResponseException e) { - // see gh#54810, wrap temporary 503's as assertion error for retry - if (e.getResponse().getStatusLine().getStatusCode() != 503) { - throw e; - } - throw new AssertionError("Failed to retrieve audit logs", e); - } - }, 5, TimeUnit.SECONDS); - } - - @SuppressWarnings("unchecked") - protected List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Map.Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index e537a6f280ac0..0f807fbae45d1 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -95,10 +95,16 @@ public void testCreateAndDeleteTransformInALoop() throws IOException { try { // Create the batch transform createPivotReviewsTransform(transformId, destIndex, null); + 
assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + // Wait until the transform finishes startAndWaitForTransform(transformId, destIndex); + // After the transform finishes, there should be no transform task left assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { From 54eeb622d523ff4c1ac901a471f86927444af2bd Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Fri, 5 Apr 2024 15:29:54 +0200 Subject: [PATCH 054/173] Add ES|QL Locate function (#106899) * Add ES|QL Locate function --- docs/changelog/106899.yaml | 6 + .../functions/description/locate.asciidoc | 5 + .../esql/functions/layout/locate.asciidoc | 14 ++ .../esql/functions/parameters/locate.asciidoc | 12 ++ .../esql/functions/signature/locate.svg | 1 + .../esql/functions/types/locate.asciidoc | 12 ++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/string.csv-spec | 112 +++++++++++ .../scalar/string/LocateEvaluator.java | 166 +++++++++++++++++ .../function/EsqlFunctionRegistry.java | 4 +- .../function/scalar/string/Locate.java | 140 ++++++++++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 15 ++ .../function/scalar/string/LocateTests.java | 175 ++++++++++++++++++ 13 files changed, 666 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/106899.yaml create mode 100644 docs/reference/esql/functions/description/locate.asciidoc create mode 100644 docs/reference/esql/functions/layout/locate.asciidoc create mode 100644 docs/reference/esql/functions/parameters/locate.asciidoc create mode 100644 docs/reference/esql/functions/signature/locate.svg create mode 100644 docs/reference/esql/functions/types/locate.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java diff --git a/docs/changelog/106899.yaml b/docs/changelog/106899.yaml new file mode 100644 index 0000000000000..a2db24236a47e --- /dev/null +++ b/docs/changelog/106899.yaml @@ -0,0 +1,6 @@ +pr: 106899 +summary: Add ES|QL Locate function +area: ES|QL +type: enhancement +issues: + - 106818 diff --git a/docs/reference/esql/functions/description/locate.asciidoc b/docs/reference/esql/functions/description/locate.asciidoc new file mode 100644 index 0000000000000..60a6d435e37b6 --- /dev/null +++ b/docs/reference/esql/functions/description/locate.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns an integer that indicates the position of a keyword substring within another string diff --git a/docs/reference/esql/functions/layout/locate.asciidoc b/docs/reference/esql/functions/layout/locate.asciidoc new file mode 100644 index 0000000000000..1017c7f844dd6 --- /dev/null +++ b/docs/reference/esql/functions/layout/locate.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
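Before the generated layout file below, a minimal worked query, taken from the csv-spec tests later in this patch, showing the two result conventions: positions are 1-based, and 0 means the substring was not found.

```esql
ROW a = "hello"
| EVAL a_ll = LOCATE(a, "ll")  // 3: 1-based position of the match
| EVAL miss = LOCATE(a, "int") // 0: substring not found
```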
+ +[discrete] +[[esql-locate]] +=== `LOCATE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/locate.svg[Embedded,opts=inline] + +include::../parameters/locate.asciidoc[] +include::../description/locate.asciidoc[] +include::../types/locate.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/locate.asciidoc b/docs/reference/esql/functions/parameters/locate.asciidoc new file mode 100644 index 0000000000000..e48a7a891712c --- /dev/null +++ b/docs/reference/esql/functions/parameters/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +An input string + +`substring`:: +A substring to locate in the input string + +`start`:: +The start index diff --git a/docs/reference/esql/functions/signature/locate.svg b/docs/reference/esql/functions/signature/locate.svg new file mode 100644 index 0000000000000..2b7bc2dac0e86 --- /dev/null +++ b/docs/reference/esql/functions/signature/locate.svg @@ -0,0 +1 @@ +LOCATE(string,substring,start) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/locate.asciidoc b/docs/reference/esql/functions/types/locate.asciidoc new file mode 100644 index 0000000000000..895dce1335813 --- /dev/null +++ b/docs/reference/esql/functions/types/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | substring | start | result +keyword | keyword | integer | integer +keyword | text | integer | integer +text | keyword | integer | integer +text | text | integer | integer +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 746684aca3e38..5af5d9d3417de 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -30,6 +30,7 @@ double e() "integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" +"integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" "double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" "double log10(number:double|integer|long|unsigned_long)" "keyword|text ltrim(string:keyword|text)" @@ -138,6 +139,7 @@ greatest |first |"integer|long|double|boolean least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |[""] +locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["Base of logarithm. If `null`\, the function returns `null`. If not provided\, this function returns the natural logarithm (base e) of a value.", "Numeric expression. If `null`\, the function returns `null`."] log10 |number |"double|integer|long|unsigned_long" |Numeric expression. 
If `null`, the function returns `null`. ltrim |string |"keyword|text" |[""] @@ -247,6 +249,7 @@ greatest |Returns the maximum value from many columns. least |Returns the minimum value from many columns. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. +locate |Returns an integer that indicates the position of a keyword substring within another string log |Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning. log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. ltrim |Removes leading whitespaces from a string. @@ -357,6 +360,7 @@ greatest |"integer|long|double|boolean|keyword|text|ip|version" least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false +locate |integer |[false, false, true] |false |false log |double |[true, false] |false |false log10 |double |false |false |false ltrim |"keyword|text" |false |false |false @@ -447,5 +451,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -100 | 100 | 100 +101 | 101 | 101 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index d9c9e535c2c45..f22e1b2de7f6a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1168,3 +1168,115 @@ from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | gender:keyword | split:keyword M | [foo, bar] ; + +locate#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll"); + +a:keyword | a_ll:integer +hello | 3 +; + +locateFail#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "int"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateZeroStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 0); + +a:keyword | a_ll:integer +hello | 3 +; + +locateExactStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 3); + +a:keyword | a_ll:integer +hello | 3 +; + +locateLongerStart#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "ll", 10); + +a:keyword | a_ll:integer +hello | 0 +; + +locateLongerSubstr#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "farewell"); + +a:keyword | a_ll:integer +hello | 0 +; + +locateSame#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(a, "hello"); + +a:keyword | a_ll:integer +hello | 1 +; + +locateWithSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_s = substring(last_name, 2) | eval f_l = locate(last_name, f_s) | keep emp_no, last_name, f_s, f_l; +ignoreOrder:true + +emp_no:integer | last_name:keyword | f_s:keyword | f_l:integer +10001 | Facello | acello | 2 +10002 | Simmel | immel | 2 +10003 
| Bamford | amford | 2 +10004 | Koblick | oblick | 2 +10005 | Maliniak | aliniak | 2 +10006 | Preusig | reusig | 2 +10007 | Zielinski | ielinski | 2 +10008 | Kalloufi | alloufi | 2 +10009 | Peac | eac | 2 +10010 | Piveteau | iveteau | 2 +; + +locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 3) | eval f_l = locate(a, f_s); + +a:keyword | f_s:keyword | f_l:integer +🐱Meow!🐶Woof! | Meow!🐶Woof! | 3 +; + +locateNestedSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = substring(a, locate(a, "ll")); + +a:keyword | a_ll:keyword +hello | llo +; + +locateNestSubstring#[skip:-8.13.99,reason:new string function added in 8.14] +row a = "hello" | eval a_ll = locate(substring(a, 2), "ll"); + +a:keyword | a_ll:integer +hello | 2 +; + +locateStats#[skip:-8.13.99,reason:new string function added in 8.14] +from employees | where emp_no <= 10010 | eval f_l = locate(last_name, "ll") | stats min(f_l), max(f_l) by job_positions | sort job_positions | limit 5; + +min(f_l):integer | max(f_l):integer | job_positions:keyword +5 | 5 | Accountant +0 | 0 | Architect +0 | 0 | Head Human Resources +0 | 3 | Internship +3 | 3 | Junior Developer +; + +locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] +required_feature: esql.mv_warn + +from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; +ignoreOrder:true +warning:Line 1:80: evaluation of [locate(description, \"ate\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:80: java.lang.IllegalArgumentException: single-value function encountered multi-value + +l1:integer | l2:integer +2 | null +2 | null +null | 0 +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java new file mode 100644 index 0000000000000..24055ad44f624 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateEvaluator.java @@ -0,0 +1,166 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. 
+ */ +public final class LocateEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final EvalOperator.ExpressionEvaluator start; + + private final DriverContext driverContext; + + public LocateEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, EvalOperator.ExpressionEvaluator start, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.start = start; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + try (IntBlock startBlock = (IntBlock) start.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + IntVector startVector = startBlock.asVector(); + if (startVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock, startBlock); + } + return eval(page.getPositionCount(), strVector, substrVector, startVector).asBlock(); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock, + IntBlock startBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch), startBlock.getInt(startBlock.getFirstValueIndex(p)))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector, + IntVector startVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), 
substrVector.getBytesRef(p, substrScratch), startVector.getInt(p))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr, start); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + private final EvalOperator.ExpressionEvaluator.Factory start; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr, + EvalOperator.ExpressionEvaluator.Factory start) { + this.source = source; + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public LocateEvaluator get(DriverContext context) { + return new LocateEvaluator(source, str.get(context), substr.get(context), start.get(context), context); + } + + @Override + public String toString() { + return "LocateEvaluator[" + "str=" + str + ", substr=" + substr + ", start=" + start + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 1a27c7b69c1e6..3db7ae3cac7b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -90,6 +90,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; @@ -174,7 +175,8 @@ private FunctionDefinition[][] functions() { def(StartsWith.class, StartsWith::new, "starts_with"), def(EndsWith.class, EndsWith::new, "ends_with"), def(ToLower.class, ToLower::new, "to_lower"), - def(ToUpper.class, ToUpper::new, "to_upper") }, + def(ToUpper.class, ToUpper::new, "to_upper"), + def(Locate.class, Locate::new, "locate") }, // date new FunctionDefinition[] { def(DateDiff.class, DateDiff::new, "date_diff"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java new file mode 100644 index 0000000000000..a1157fad6c46f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.UnicodeUtil; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; + +/** + * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. + */ +public class Locate extends EsqlScalarFunction implements OptionalArgument { + + private final Expression str; + private final Expression substr; + private final Expression start; + + @FunctionInfo( + returnType = "integer", + description = "Returns an integer that indicates the position of a keyword substring within another string" + ) + public Locate( + Source source, + @Param(name = "string", type = { "keyword", "text" }, description = "An input string") Expression str, + @Param( + name = "substring", + type = { "keyword", "text" }, + description = "A substring to locate in the input string" + ) Expression substr, + @Param(optional = true, name = "start", type = { "integer" }, description = "The start index") Expression start + ) { + super(source, start == null ? Arrays.asList(str, substr) : Arrays.asList(str, substr, start)); + this.str = str; + this.substr = substr; + this.start = start; + } + + @Override + public DataType dataType() { + return DataTypes.INTEGER; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isString(str, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + resolution = isString(substr, sourceText(), SECOND); + if (resolution.unresolved()) { + return resolution; + } + + return start == null ? 
TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + } + + @Override + public boolean foldable() { + return str.foldable() && substr.foldable() && (start == null || start.foldable()); + } + + @Evaluator + static int process(BytesRef str, BytesRef substr, int start) { + if (str == null || substr == null || str.length < substr.length) { + return 0; + } + int codePointCount = UnicodeUtil.codePointCount(str); + int indexStart = indexStart(codePointCount, start); + String utf8ToString = str.utf8ToString(); + return 1 + utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + } + + @Evaluator(extraName = "NoStart") + static int process(BytesRef str, BytesRef substr) { + return process(str, substr, 0); + } + + private static int indexStart(int codePointCount, int start) { + // esql is 1-based when it comes to string manipulation. We treat start = 0 and 1 the same + // a negative value is relative to the end of the string + int indexStart; + if (start > 0) { + indexStart = start - 1; + } else if (start < 0) { + indexStart = codePointCount + start; // start is negative, so this is a subtraction + } else { + indexStart = start; // start == 0 + } + return Math.min(Math.max(0, indexStart), codePointCount); // sanitise string start index + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Locate(source(), newChildren.get(0), newChildren.get(1), start == null ? null : newChildren.get(2)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Locate::new, str, substr, start); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory strExpr = toEvaluator.apply(str); + ExpressionEvaluator.Factory substrExpr = toEvaluator.apply(substr); + if (start == null) { + return new LocateNoStartEvaluator.Factory(source(), strExpr, substrExpr); + } + return new LocateEvaluator.Factory(source(), strExpr, substrExpr, toEvaluator.apply(start)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index a85ddac532241..4640f1a7168c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -113,6 +113,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; @@ -400,6 +401,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), + of(ScalarFunction.class, Locate.class, PlanNamedTypes::writeLocate, PlanNamedTypes::readLocate), 
of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), of(ScalarFunction.class, Split.class, PlanNamedTypes::writeSplit, PlanNamedTypes::readSplit), @@ -1592,6 +1594,19 @@ static void writeSubstring(PlanStreamOutput out, Substring substring) throws IOE out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); } + static Locate readLocate(PlanStreamInput in) throws IOException { + return new Locate(in.readSource(), in.readExpression(), in.readExpression(), in.readOptionalNamed(Expression.class)); + } + + static void writeLocate(PlanStreamOutput out, Locate locate) throws IOException { + out.writeSource(locate.source()); + List fields = locate.children(); + assert fields.size() == 2 || fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeOptionalWriteable(fields.size() == 3 ? o -> out.writeExpression(fields.get(2)) : null); + } + static Replace readReplace(PlanStreamInput in) throws IOException { return new Replace(Source.EMPTY, in.readExpression(), in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java new file mode 100644 index 0000000000000..c1d3df53ece60 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for {@link Locate} function. 
+ */ +public class LocateTests extends AbstractFunctionTestCase { + public LocateTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add( + supplier( + "keywords", + DataTypes.KEYWORD, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed keyword, text", + DataTypes.KEYWORD, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "texts", + DataTypes.TEXT, + DataTypes.TEXT, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + suppliers.add( + supplier( + "mixed text, keyword", + DataTypes.TEXT, + DataTypes.KEYWORD, + () -> randomRealisticUnicodeOfCodepointLength(10), + () -> randomRealisticUnicodeOfCodepointLength(2), + () -> 0 + ) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + public void testToString() { + assertThat( + evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + field("start", DataTypes.INTEGER) + ) + ).get(driverContext()).toString(), + equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") + ); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); + } + + public void testPrefixString() { + assertThat(process("a tiger", "a t", 0), equalTo(1)); + assertThat(process("a tiger", "a", 0), equalTo(1)); + assertThat(process("界世", "界", 0), equalTo(1)); + } + + public void testSuffixString() { + assertThat(process("a tiger", "er", 0), equalTo(6)); + assertThat(process("a tiger", "r", 0), equalTo(7)); + assertThat(process("世界", "界", 0), equalTo(2)); + } + + public void testMidString() { + assertThat(process("a tiger", "ti", 0), equalTo(3)); + assertThat(process("a tiger", "ige", 0), equalTo(4)); + assertThat(process("世界世", "界", 0), equalTo(2)); + } + + public void testOutOfRange() { + assertThat(process("a tiger", "tigers", 0), equalTo(0)); + assertThat(process("a tiger", "ipa", 0), equalTo(0)); + assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); + } + + public void testExactString() { + assertThat(process("a tiger", "a tiger", 0), equalTo(1)); + assertThat(process("tigers", "tigers", 0), equalTo(1)); + assertThat(process("界世", "界世", 0), equalTo(1)); + } + + private Integer process(String str, String substr, Integer start) { + try ( + EvalOperator.ExpressionEvaluator eval = evaluator( + new Locate( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + field("substr", DataTypes.KEYWORD), + new Literal(Source.EMPTY, start, DataTypes.INTEGER) + ) + ).get(driverContext()); + Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) + ) { + return block.isNull(0) ? 
Integer.valueOf(0) : ((Integer) toJavaObject(block, 0));
+        }
+    }
+
+    private static TestCaseSupplier supplier(
+        String name,
+        DataType firstType,
+        DataType secondType,
+        Supplier<String> strValueSupplier,
+        Supplier<String> substrValueSupplier,
+        Supplier<Integer> startSupplier
+    ) {
+        return new TestCaseSupplier(name, List.of(firstType, secondType), () -> {
+            List<TestCaseSupplier.TypedData> values = new ArrayList<>();
+            String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]";
+
+            String value = strValueSupplier.get();
+            values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0"));
+
+            String substrValue = substrValueSupplier.get();
+            values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1"));
+
+            Integer startValue = startSupplier.get();
+            values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2"));
+
+            int expectedValue = 1 + value.indexOf(substrValue);
+            return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue));
+        });
+    }
+}

From 2cfcefba9c39e7927ff672ffb0c6ec38a138a34f Mon Sep 17 00:00:00 2001
From: Moritz Mack
Date: Fri, 5 Apr 2024 15:46:22 +0200
Subject: [PATCH 055/173] Update 8.13 known issues with JDK 22 bug /
 recommendation to downgrade (#107156)

Update 8.13 known issues with JDK 22 bug / recommendation to downgrade.
I'll follow up adding this to 8.13.1 as well once backported.

---
 docs/reference/release-notes/8.13.0.asciidoc | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc
index 47855773d0543..ed3c159386a8a 100644
--- a/docs/reference/release-notes/8.13.0.asciidoc
+++ b/docs/reference/release-notes/8.13.0.asciidoc
@@ -7,6 +7,9 @@ Also see <<breaking-8.13,Breaking changes>>.
 [float]
 === Known issues

+* Due to a bug in the bundled JDK 22, nodes might crash abruptly under high memory pressure.
+  We recommend https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html#jvm-version[downgrading to JDK 21.0.2] asap to mitigate the issue.
+
 * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880])
 +
 This affects clusters running version 8.10 or later, with an active downsampling

From cd6af63022b159c837d4a9ea03699c68217c938d Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer
Date: Fri, 5 Apr 2024 16:25:42 +0200
Subject: [PATCH 056/173] [Profiling] Annotate TODOs for 9.0.0 upgrade
 (#107150)

As suggested by David in #106592, we're adding the annotation
`@UpdateForV9` to all places in the Universal Profiling plugin that can
be removed once we move to 9.0.0.
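For illustration, `@UpdateForV9` is a plain marker annotation: tagging a member costs
nothing at runtime, but it lets tooling (or even a grep) enumerate every scheduled
removal before the 9.0.0 branch is cut. A hypothetical sketch, not code taken from
the plugin:

    @UpdateForV9 // remove in 9.0.0 together with the obsolete field it supports
    private static String legacyHostMachineField() {
        return "profiling.host.machine"; // fallback kept only while 8.x documents exist
    }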
Closes #106592 Closes #106593 Closes #106596 Closes #106597 Closes #106681 --- .../java/org/elasticsearch/xpack/profiling/CO2Calculator.java | 3 +++ .../elasticsearch/xpack/profiling/GetFlamegraphResponse.java | 3 +++ .../elasticsearch/xpack/profiling/GetStackTracesResponse.java | 3 +++ .../java/org/elasticsearch/xpack/profiling/HostMetadata.java | 2 ++ .../java/org/elasticsearch/xpack/profiling/InstanceType.java | 2 ++ 5 files changed, 13 insertions(+) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index d69178f158a88..fcdc116cab725 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; + import java.util.Map; final class CO2Calculator { @@ -52,6 +54,7 @@ public double getAnnualCO2Tons(String hostID, long samples) { return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host); } + @UpdateForV9 // only allow OTEL semantic conventions private double getKiloWattsPerCore(HostMetadata host) { return switch (host.hostArchitecture) { // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java index 65b342abddd9d..c851b372cb2db 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -24,7 +25,9 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC private final int size; private final double samplingRate; private final long selfCPU; + @UpdateForV9 // remove this field - it is unused in Kibana private final long totalCPU; + @UpdateForV9 // remove this field - it is unused in Kibana private final long totalSamples; private final List> edges; private final List fileIds; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java index 89c0b4ab6b0fb..4cad1104f783b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import java.util.Collections; @@ -29,8 +30,10 @@ public class GetStackTracesResponse extends ActionResponse implements 
ChunkedToX private final Map stackFrames; @Nullable private final Map executables; + @UpdateForV9 // remove this field - it is unused in Kibana @Nullable private final Map stackTraceEvents; + @UpdateForV9 // remove this field - it is unused in Kibana private final int totalFrames; private final double samplingRate; private final long totalSamples; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e1e3e27e951bf..aae6615114f43 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -30,6 +31,7 @@ final class HostMetadata implements ToXContentObject { this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } + @UpdateForV9 // remove fallback to the "profiling.host.machine" field and remove it from the component template "profiling-hosts". public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java index 3aa0a79df13bc..d694ffd2cbebc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -73,6 +74,7 @@ public static InstanceType fromHostSource(Map source) { return new InstanceType(provider, region, null); } + @UpdateForV9 // remove this method private static InstanceType fromObsoleteHostSource(Map source) { // Check and handle AWS. String region = (String) source.get("ec2.placement.region"); From 95c7c0978020de5bac685802655bfab3f475e628 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Fri, 5 Apr 2024 15:32:16 +0100 Subject: [PATCH 057/173] Downgrade the bundled JDK to JDK 21.0.2 (#107137) This commit downgrades the bundled JDK to JDK 21.0.2. 
---
 build-tools-internal/version.properties |  2 +-
 gradle/verification-metadata.xml | 26 ++++++++++++-------------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties
index a2e8651810042..0883097e75aad 100644
--- a/build-tools-internal/version.properties
+++ b/build-tools-internal/version.properties
@@ -2,7 +2,7 @@ elasticsearch = 8.14.0
 lucene = 9.10.0

 bundled_jdk_vendor = openjdk
-bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059
+bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac
 # optional dependencies
 spatial4j = 0.7
 jts = 1.15.0
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 7e2e781d3ce62..8978274e6df95 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -1694,25 +1694,25 @@
 [hunk body elided: the 13 removed and 13 added <component>/<artifact>/<sha256> checksum entries that swap the JDK 22 artifacts for JDK 21.0.2 were stripped during extraction and cannot be reconstructed]

From 667a0609714ef429172096f9174e6625713706ae Mon Sep 17 00:00:00 2001
From: Moritz Mack
Date: Fri, 5 Apr 2024 17:09:03 +0200
Subject: [PATCH 058/173] Fix link in 8.13 release notes. (#107161)

Use id for link instead.
Relates to https://github.com/elastic/elasticsearch/pull/107159

---
 docs/reference/release-notes/8.13.0.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc
index ed3c159386a8a..99ee4e5fb86e1 100644
--- a/docs/reference/release-notes/8.13.0.asciidoc
+++ b/docs/reference/release-notes/8.13.0.asciidoc
@@ -8,7 +8,7 @@ Also see <<breaking-8.13,Breaking changes>>.
 === Known issues

 * Due to a bug in the bundled JDK 22, nodes might crash abruptly under high memory pressure.
-  We recommend https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html#jvm-version[downgrading to JDK 21.0.2] asap to mitigate the issue.
+  We recommend <<jvm-version,downgrading to JDK 21.0.2>> asap to mitigate the issue.

 * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880])
 +

From da8151023fe6fa672184fde30b062a0a949d59d3 Mon Sep 17 00:00:00 2001
From: Parker Timmins
Date: Fri, 5 Apr 2024 09:50:28 -0600
Subject: [PATCH 059/173] GET /_all should return hidden indices with visible
 aliases (#106975)

GET /_all should return hidden indices if they are accessible through a
visible alias. This is currently the behavior when resolution occurs in
the security layer. This change adds this behavior to name resolution
when security is not used.
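Boiled down, the new rule reads like the following sketch: a hypothetical
free-standing predicate over plain booleans, not the real IndexAbstraction
or Context API (the actual stream-based filtering lives in
WildcardExpressionResolver.resolveAll in the diff below):

    // An abstraction survives GET /_all when it is visible (or hidden expansion
    // is requested), passes the system-index check, and is reachable either as a
    // data stream (when data streams are included) or as an alias (when aliases
    // are not ignored). Concrete indices are resolved separately and merged in.
    static boolean keepForAll(boolean isHidden, boolean expandHidden, boolean systemAccessOk,
                              boolean isDataStream, boolean includeDataStreams,
                              boolean isAlias, boolean ignoreAliases) {
        boolean visibleEnough = expandHidden || isHidden == false;
        boolean asDataStream = includeDataStreams && isDataStream;
        boolean asAlias = ignoreAliases == false && isAlias;
        return visibleEnough && systemAccessOk && (asDataStream || asAlias);
    }

A hidden index therefore never enters through this path on its own, but a
visible alias pointing at it does, and expanding that alias is what brings
the hidden index into the response.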
--- docs/changelog/106975.yaml | 5 + .../metadata/IndexNameExpressionResolver.java | 48 +++---- .../IndexNameExpressionResolverTests.java | 4 +- .../WildcardExpressionResolverTests.java | 123 ++++++++++++++++++ 4 files changed, 156 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/106975.yaml diff --git a/docs/changelog/106975.yaml b/docs/changelog/106975.yaml new file mode 100644 index 0000000000000..bd32b3574c4f9 --- /dev/null +++ b/docs/changelog/106975.yaml @@ -0,0 +1,5 @@ +pr: 106975 +summary: GET /_all should return hidden indices with visible aliases +area: Indices APIs +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index e8e8ca767cc34..b88292d4ed79b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -1246,32 +1246,36 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices and all the datastreams, considering the open/closed, system, and hidden context parameters. + * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ public static Collection resolveAll(Context context) { - List resolvedExpressions = resolveEmptyOrTrivialWildcard(context); - if (context.includeDataStreams() == false) { - return resolvedExpressions; - } else { - Stream dataStreamsAbstractions = context.getState() - .metadata() - .getIndicesLookup() - .values() - .stream() - .filter(indexAbstraction -> indexAbstraction.getType() == Type.DATA_STREAM) - .filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); - if (context.getOptions().expandWildcardsHidden() == false) { - dataStreamsAbstractions = dataStreamsAbstractions.filter(indexAbstraction -> indexAbstraction.isHidden() == false); - } - // dedup backing indices if expand hidden indices option is true - Set resolvedIncludingDataStreams = expandToOpenClosed(context, dataStreamsAbstractions).collect(Collectors.toSet()); - resolvedIncludingDataStreams.addAll(resolvedExpressions); - return resolvedIncludingDataStreams; + List concreteIndices = resolveEmptyOrTrivialWildcard(context); + + if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { + return concreteIndices; } + + Stream ias = context.getState() + .metadata() + .getIndicesLookup() + .values() + .stream() + .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) + .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + resolved.addAll(concreteIndices); + return resolved; + } + + private static boolean shouldIncludeIfDataStream(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.includeDataStreams() && ia.getType() == Type.DATA_STREAM; + } + + private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpressionResolver.Context context) { + return context.getOptions().ignoreAliases() == false 
&& ia.getType() == Type.ALIAS; } /** diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index a1eeceba8a390..2fba37772ef94 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1217,9 +1217,9 @@ public void testHiddenAliasAndHiddenIndexResolution() { indexNames = indexNameExpressionResolver.concreteIndexNames(state, includeHiddenOptions, visibleAlias); assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); - // A total wildcards does not resolve the hidden index in this case + // total wildcards should also resolve both visible and hidden indices if there is a visible alias indexNames = indexNameExpressionResolver.concreteIndexNames(state, excludeHiddenOptions, "*"); - assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex)); + assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex)); } { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 2406eb8e76ab9..9980e1b27e48c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -280,6 +280,129 @@ public void testAll() { assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } + public void testAllAliases() { + { + // hidden index with hidden alias should not be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-hidden-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-hidden").isHidden(true)) // alias hidden + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + SystemIndexAccessLevel.NONE + ); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + } + + { + // hidden index with visible alias should be returned + Metadata.Builder mdBuilder = Metadata.builder() + .put( + indexBuilder("index-visible-alias", true) // index hidden + .state(State.OPEN) + .putAlias(AliasMetadata.builder("alias-visible").isHidden(false)) // alias visible + ); + + ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); + + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + state, + IndicesOptions.lenientExpandOpen(), // don't include hidden + SystemIndexAccessLevel.NONE + ); + assertThat( + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), + equalTo(newHashSet("index-visible-alias")) + ); + } + } + + public void testAllDataStreams() { + + String dataStreamName = "foo_logs"; + long epochMillis = randomLongBetween(1580536800000L, 1583042400000L); + IndexMetadata firstBackingIndexMetadata = createBackingIndex(dataStreamName, 1, epochMillis).build(); + + IndicesOptions 
indicesAndAliasesOptions = IndicesOptions.fromOptions(
+            randomBoolean(),
+            randomBoolean(),
+            true,
+            false,
+            true,
+            false,
+            false,
+            false
+        );
+
+        {
+            // visible data streams should be returned by _all even though backing indices are hidden
+            Metadata.Builder mdBuilder = Metadata.builder()
+                .put(firstBackingIndexMetadata, true)
+                .put(DataStreamTestHelper.newInstance(dataStreamName, List.of(firstBackingIndexMetadata.getIndex())));
+
+            ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
+
+            IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
+                state,
+                indicesAndAliasesOptions,
+                false,
+                false,
+                true,
+                SystemIndexAccessLevel.NONE,
+                NONE,
+                NONE
+            );
+
+            assertThat(
+                newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)),
+                equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis)))
+            );
+        }
+
+        {
+            // if the data stream itself is hidden, backing indices should not be returned
+            boolean hidden = true;
+            var dataStream = new DataStream(
+                dataStreamName,
+                List.of(firstBackingIndexMetadata.getIndex()),
+                1,
+                null,
+                hidden,
+                false,
+                false,
+                false,
+                null,
+                null,
+                false,
+                List.of(),
+                null
+            );
+
+            Metadata.Builder mdBuilder = Metadata.builder().put(firstBackingIndexMetadata, true).put(dataStream);
+
+            ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
+
+            IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
+                state,
+                indicesAndAliasesOptions,
+                false,
+                false,
+                true,
+                SystemIndexAccessLevel.NONE,
+                NONE,
+                NONE
+            );
+
+            assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet()));
+        }
+    }
+
 public void testResolveEmpty() {
     Metadata.Builder mdBuilder = Metadata.builder()
         .put(

From e8747a6f48e0ab695a11bfe534b726da1862077c Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Fri, 5 Apr 2024 09:08:17 -0700
Subject: [PATCH 060/173] Always enable fast path for load values from single
 segment (#106977)

I've been looking to simplify the execution of the enrich lookup. There
are several issues we need to address in the enrich process. One of the
problems is that we currently perform lookup and extract enrich fields
term by term. To ensure that these incoming changes don't degrade
performance, we need to enable a fast path for a single segment when the
document IDs are not sorted.
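The mechanics of the unsorted single-segment path added below reduce to a
sort/load/un-permute round trip. A rough, self-contained sketch using
hypothetical array-based types instead of the real DocVector/Block machinery:

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.function.IntUnaryOperator;

    // Visit documents in ascending doc-id order so readers only advance forward,
    // then un-permute the loaded values back into the caller's row order.
    static int[] loadUnsorted(int[] docs, IntUnaryOperator loadForDoc) {
        Integer[] forwards = new Integer[docs.length];
        for (int i = 0; i < docs.length; i++) forwards[i] = i;
        Arrays.sort(forwards, Comparator.comparingInt(i -> docs[i]));
        int[] sorted = new int[docs.length];
        for (int i = 0; i < docs.length; i++) {
            sorted[i] = loadForDoc.applyAsInt(docs[forwards[i]]); // ascending doc ids
        }
        int[] result = new int[docs.length];
        for (int i = 0; i < docs.length; i++) {
            result[forwards[i]] = sorted[i]; // restore the original position order
        }
        return result;
    }

With shard and segment constant, the sort key is the doc id alone, which is
exactly what the new DocVector.singleSegment() branch exploits: there is no
need to compare shard or segment per position.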
--- .../elasticsearch/compute/data/DocVector.java | 76 ++++++++++++------ .../lucene/ValuesSourceReaderOperator.java | 77 ++++++++++++------- 2 files changed, 102 insertions(+), 51 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 9893ea1826945..2404217d11f95 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -84,6 +84,10 @@ public boolean singleSegmentNonDecreasing() { return singleSegmentNonDecreasing; } + public boolean singleSegment() { + return shards.isConstant() && segments.isConstant(); + } + private boolean checkIfSingleSegmentNonDecreasing() { if (getPositionCount() < 2) { return true; @@ -138,35 +142,57 @@ private void buildShardSegmentDocMapIfMissing() { for (int p = 0; p < forwards.length; p++) { forwards[p] = p; } - new IntroSorter() { - int pivot; - - @Override - protected void setPivot(int i) { - pivot = finalForwards[i]; - } - - @Override - protected int comparePivot(int j) { - int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j])); - if (cmp != 0) { - return cmp; + if (singleSegment()) { + new IntroSorter() { + int pivot; + + @Override + protected void setPivot(int i) { + pivot = finalForwards[i]; + } + + @Override + protected int comparePivot(int j) { + return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); } - cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j])); - if (cmp != 0) { - return cmp; + + @Override + protected void swap(int i, int j) { + int tmp = finalForwards[i]; + finalForwards[i] = finalForwards[j]; + finalForwards[j] = tmp; + } + }.sort(0, forwards.length); + } else { + new IntroSorter() { + int pivot; + + @Override + protected void setPivot(int i) { + pivot = finalForwards[i]; } - return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); - } - @Override - protected void swap(int i, int j) { - int tmp = finalForwards[i]; - finalForwards[i] = finalForwards[j]; - finalForwards[j] = tmp; - } - }.sort(0, forwards.length); + @Override + protected int comparePivot(int j) { + int cmp = Integer.compare(shards.getInt(pivot), shards.getInt(finalForwards[j])); + if (cmp != 0) { + return cmp; + } + cmp = Integer.compare(segments.getInt(pivot), segments.getInt(finalForwards[j])); + if (cmp != 0) { + return cmp; + } + return Integer.compare(docs.getInt(pivot), docs.getInt(finalForwards[j])); + } + @Override + protected void swap(int i, int j) { + int tmp = finalForwards[i]; + finalForwards[i] = finalForwards[j]; + finalForwards[j] = tmp; + } + }.sort(0, forwards.length); + } backwards = new int[forwards.length]; for (int p = 0; p < forwards.length; p++) { backwards[forwards[p]] = p; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 08be21f95786f..eab2a314b2074 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -137,7 +137,22 @@ protected Page process(Page page) { boolean success = false; try { if 
(docVector.singleSegmentNonDecreasing()) { - loadFromSingleLeaf(blocks, docVector); + IntVector docs = docVector.docs(); + int shard = docVector.shards().getInt(0); + int segment = docVector.segments().getInt(0); + loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(i); + } + }); + } else if (docVector.singleSegment()) { + loadFromSingleLeafUnsorted(blocks, docVector); } else { try (LoadFromMany many = new LoadFromMany(blocks, docVector)) { many.run(); @@ -200,38 +215,24 @@ private boolean positionFieldWorkDocGuarteedAscending(int shard, int segment) { return true; } - private void loadFromSingleLeaf(Block[] blocks, DocVector docVector) throws IOException { - int shard = docVector.shards().getInt(0); - int segment = docVector.segments().getInt(0); - int firstDoc = docVector.docs().getInt(0); + private void loadFromSingleLeaf(Block[] blocks, int shard, int segment, BlockLoader.Docs docs) throws IOException { + int firstDoc = docs.get(0); positionFieldWork(shard, segment, firstDoc); - IntVector docs = docVector.docs(); - BlockLoader.Docs loaderDocs = new BlockLoader.Docs() { - @Override - public int count() { - return docs.getPositionCount(); - } - - @Override - public int get(int i) { - return docs.getInt(i); - } - }; StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS; List rowStrideReaders = new ArrayList<>(fields.length); - ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.getPositionCount()); + ComputeBlockLoaderFactory loaderBlockFactory = new ComputeBlockLoaderFactory(blockFactory, docs.count()); LeafReaderContext ctx = ctx(shard, segment); try { for (int f = 0; f < fields.length; f++) { FieldWork field = fields[f]; BlockLoader.ColumnAtATimeReader columnAtATime = field.columnAtATime(ctx); if (columnAtATime != null) { - blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, loaderDocs); + blocks[f] = (Block) columnAtATime.read(loaderBlockFactory, docs); } else { rowStrideReaders.add( new RowStrideReaderWork( field.rowStride(ctx), - (Block.Builder) field.loader.builder(loaderBlockFactory, docs.getPositionCount()), + (Block.Builder) field.loader.builder(loaderBlockFactory, docs.count()), f ) ); @@ -248,7 +249,7 @@ public int get(int i) { ); } StoredFieldLoader storedFieldLoader; - if (useSequentialStoredFieldsReader(docVector.docs())) { + if (useSequentialStoredFieldsReader(docs)) { storedFieldLoader = StoredFieldLoader.fromSpecSequential(storedFieldsSpec); trackStoredFields(storedFieldsSpec, true); } else { @@ -259,8 +260,8 @@ public int get(int i) { storedFieldLoader.getLoader(ctx, null), storedFieldsSpec.requiresSource() ? 
shardContexts.get(shard).newSourceLoader.get().leaf(ctx.reader(), null) : null ); - for (int p = 0; p < docs.getPositionCount(); p++) { - int doc = docs.getInt(p); + for (int p = 0; p < docs.count(); p++) { + int doc = docs.get(p); if (storedFields != null) { storedFields.advanceTo(doc); } @@ -278,6 +279,30 @@ public int get(int i) { } } + private void loadFromSingleLeafUnsorted(Block[] blocks, DocVector docVector) throws IOException { + IntVector docs = docVector.docs(); + int[] forwards = docVector.shardSegmentDocMapForwards(); + int shard = docVector.shards().getInt(0); + int segment = docVector.segments().getInt(0); + loadFromSingleLeaf(blocks, shard, segment, new BlockLoader.Docs() { + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(forwards[i]); + } + }); + final int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int i = 0; i < blocks.length; i++) { + Block in = blocks[i]; + blocks[i] = in.filter(backwards); + in.close(); + } + } + private class LoadFromMany implements Releasable { private final Block[] target; private final IntVector shards; @@ -371,9 +396,9 @@ public void close() { * Is it more efficient to use a sequential stored field reader * when reading stored fields for the documents contained in {@code docIds}? */ - private boolean useSequentialStoredFieldsReader(IntVector docIds) { - return docIds.getPositionCount() >= SEQUENTIAL_BOUNDARY - && docIds.getInt(docIds.getPositionCount() - 1) - docIds.getInt(0) == docIds.getPositionCount() - 1; + private boolean useSequentialStoredFieldsReader(BlockLoader.Docs docs) { + int count = docs.count(); + return count >= SEQUENTIAL_BOUNDARY && docs.get(count - 1) - docs.get(0) == count - 1; } private void trackStoredFields(StoredFieldsSpec spec, boolean sequential) { From a9388e16addbb8146888e0cee68d7eea17a7013e Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 5 Apr 2024 20:16:39 +0300 Subject: [PATCH 061/173] ESQL: Fix bug when combining projections (#107131) Recursive aliases (eval x = 1, x1 = x) were not taken into account when combining projections causing the target field to be lost (and only the immediate intermediate named expression to be used instead which became invalid). 
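A simplified way to picture the fix, using plain maps as a hypothetical
stand-in for the real AttributeMap/NamedExpression types: while walking the
lower projection list in order, resolve each right-hand side through the
aliases collected so far, so substitution into the upper projection always
lands on the surviving source attribute:

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // For declarations "f1 = languages, f2 = f1" this yields {f1=languages, f2=languages};
    // before the fix the combined projection could stop at the intermediate f1, which
    // no longer exists once the lower projection is folded away.
    static Map<String, String> resolveAliasChains(LinkedHashMap<String, String> declarations) {
        Map<String, String> resolved = new HashMap<>();
        declarations.forEach((name, source) -> resolved.put(name, resolved.getOrDefault(source, source)));
        return resolved;
    }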
Fix #107083 --- docs/changelog/107131.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 34 ++++++++++ .../esql/optimizer/LogicalPlanOptimizer.java | 27 +++++--- .../optimizer/LogicalPlanOptimizerTests.java | 67 +++++++++++++++++++ 4 files changed, 123 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/107131.yaml diff --git a/docs/changelog/107131.yaml b/docs/changelog/107131.yaml new file mode 100644 index 0000000000000..ebb696931777b --- /dev/null +++ b/docs/changelog/107131.yaml @@ -0,0 +1,6 @@ +pr: 107131 +summary: "ESQL: Fix bug when combining projections" +area: ES|QL +type: bug +issues: + - 107083 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 70d5053c64c45..6ccaf1eb0b6e7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1550,3 +1550,37 @@ s2point1:d | s_mv:i | languages:i 2.1 | 3 | 5 2.1 | 3 | null ; + +evalOverridingKey +FROM employees +| EVAL k = languages +| STATS c = COUNT() BY languages, k +| DROP k +| SORT languages +; + +c:l| languages:i +15 | 1 +19 | 2 +17 | 3 +18 | 4 +21 | 5 +10 | null +; + +evalMultipleOverridingKeys#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| EVAL k = languages, k1 = k +| STATS c = COUNT() BY languages, k, k1, languages +| DROP k +| SORT languages +; + +c:l | k1:i | languages:i +15 | 1 | 1 +19 | 2 | 2 +17 | 3 | 3 +18 | 4 | 4 +21 | 5 | 5 +10 | null | null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index d0375e0b50849..7fb2784bb044f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -403,11 +403,6 @@ private static List projectAggregations( List upperProjection, List lowerAggregations ) { - AttributeMap lowerAliases = new AttributeMap<>(); - for (NamedExpression ne : lowerAggregations) { - lowerAliases.put(ne.toAttribute(), Alias.unwrap(ne)); - } - AttributeSet seen = new AttributeSet(); for (NamedExpression upper : upperProjection) { Expression unwrapped = Alias.unwrap(upper); @@ -431,11 +426,18 @@ private static List combineProjections( List lower ) { - // collect aliases in the lower list - AttributeMap aliases = new AttributeMap<>(); + // collect named expressions declaration in the lower list + AttributeMap namedExpressions = new AttributeMap<>(); + // while also collecting the alias map for resolving the source (f1 = 1, f2 = f1, etc..) 
+ AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lower) { - if ((ne instanceof Attribute) == false) { - aliases.put(ne.toAttribute(), ne); + // record the alias + aliases.put(ne.toAttribute(), Alias.unwrap(ne)); + + // record named expression as is + if (ne instanceof Alias as) { + Expression child = as.child(); + namedExpressions.put(ne.toAttribute(), as.replaceChild(aliases.resolve(child, child))); } } List replaced = new ArrayList<>(); @@ -443,7 +445,7 @@ private static List combineProjections( // replace any matching attribute with a lower alias (if there's a match) // but clean-up non-top aliases at the end for (NamedExpression ne : upper) { - NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> aliases.resolve(a, a)); + NamedExpression replacedExp = (NamedExpression) ne.transformUp(Attribute.class, a -> namedExpressions.resolve(a, a)); replaced.add((NamedExpression) trimNonTopLevelAliases(replacedExp)); } return replaced; @@ -476,7 +478,10 @@ private List replacePrunedAliasesUsedInGroupBy( var newGroupings = new ArrayList(groupings.size()); for (Expression group : groupings) { - newGroupings.add(group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a))); + var transformed = group.transformUp(Attribute.class, a -> removedAliases.resolve(a, a)); + if (Expressions.anyMatch(newGroupings, g -> Expressions.equalsAsAttribute(g, transformed)) == false) { + newGroupings.add(transformed); + } } return newGroupings; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 63c2a33543073..eb3901f37b99a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -324,6 +324,52 @@ public void testCombineProjections() { var relation = as(limit.child(), EsRelation.class); } + /** + * Expects + * Project[[languages{f}#12 AS f2]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + public void testCombineProjectionsWithEvalAndDrop() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1 + | keep f2 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("f2")); + assertThat(Expressions.name(Alias.unwrap(keep.projections().get(0))), is("languages")); + var limit = as(keep.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + + } + + /** + * Expects + * Project[[last_name{f}#26, languages{f}#25 AS f2, f4{r}#13]] + * \_Eval[[languages{f}#25 + 3[INTEGER] AS f4]] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#28, emp_no{f}#22, first_name{f}#23, ..] 
+ */ + public void testCombineProjectionsWithEval() { + var plan = plan(""" + from test + | eval f1 = languages, f2 = f1, f3 = 1 + 2, f4 = f3 + languages + | keep emp_no, *name, salary, f* + | drop f3 + | keep last_name, f2, f4 + """); + + var keep = as(plan, Project.class); + assertThat(Expressions.names(keep.projections()), contains("last_name", "f2", "f4")); + var eval = as(keep.child(), Eval.class); + assertThat(Expressions.names(eval.fields()), contains("f4")); + var add = as(Alias.unwrap(eval.fields().get(0)), Add.class); + var limit = as(eval.child(), Limit.class); + var relation = as(limit.child(), EsRelation.class); + } + public void testCombineProjectionWithFilterInBetween() { var plan = plan(""" from test @@ -366,6 +412,27 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[last_name{f}#23, first_name{f}#20, k{r}#4],[SUM(salary{f}#24) AS s, last_name{f}#23, first_name{f}#20, first_n + * ame{f}#20 AS k]] + * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] + */ + public void testCombineProjectionWithAggregationAndEval() { + var plan = plan(""" + from test + | eval k = first_name, k1 = k + | stats s = sum(salary) by last_name, first_name, k, k1 + | keep s, last_name, first_name, k + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.aggregates()), contains("s", "last_name", "first_name", "k")); + assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name", "k")); + } + /** * Expects * TopN[[Order[x{r}#10,ASC,LAST]],1000[INTEGER]] From 5a9a9b87ac78556fd3dd0953630a8c6e96e9ea1c Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 5 Apr 2024 13:55:22 -0400 Subject: [PATCH 062/173] Adding tests and fixing test failure #106964 (#107118) closes: https://github.com/elastic/elasticsearch/issues/106964 --- .../80_dense_vector_indexed_by_default.yml | 27 +++++++++++++++++++ .../vectors/DenseVectorFieldMapperTests.java | 20 +++++++++----- 2 files changed, 41 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 784edfdac3469..407313a59c5e8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -147,3 +147,30 @@ setup: - match: { test_default_index_options.mappings.properties.vector.index: true } - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } +--- +"Default index options for dense_vector element type byte": + - skip: + version: ' - 8.13.99' + reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' + - do: + indices.create: + index: test_default_index_options + body: + mappings: + properties: + vector: + element_type: byte + type: dense_vector + dims: 5 + + - match: { acknowledged: true } + + - do: + indices.get_mapping: + index: test_default_index_options + + - match: { test_default_index_options.mappings.properties.vector.type: dense_vector } + - match: { 
test_default_index_options.mappings.properties.vector.dims: 5 } + - match: { test_default_index_options.mappings.properties.vector.index: true } + - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } + - is_false: test_default_index_options.mappings.properties.vector.index_options.type diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index e05cc92c8a76b..ec7d0a85f4486 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -20,7 +20,6 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; @@ -65,7 +64,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106964") public class DenseVectorFieldMapperTests extends MapperTestCase { private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0; @@ -81,23 +79,33 @@ public DenseVectorFieldMapperTests() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { - indexMapping(b, true); + indexMapping(b, IndexVersion.current()); } @Override protected void minimalMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { - indexMapping(b, indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION)); + indexMapping(b, indexVersion); } - private void indexMapping(XContentBuilder b, boolean indexedByDefault) throws IOException { + private void indexMapping(XContentBuilder b, IndexVersion indexVersion) throws IOException { b.field("type", "dense_vector").field("dims", 4); if (elementType != ElementType.FLOAT) { b.field("element_type", elementType.toString()); } - if (indexedByDefault || indexed) { + if (indexVersion.onOrAfter(DenseVectorFieldMapper.INDEXED_BY_DEFAULT_INDEX_VERSION) || indexed) { // Serialize if it's new index version, or it was not the default for previous indices b.field("index", indexed); } + if (indexVersion.onOrAfter(DenseVectorFieldMapper.DEFAULT_TO_INT8) + && indexed + && elementType.equals(ElementType.FLOAT) + && indexOptionsSet == false) { + b.startObject("index_options"); + b.field("type", "int8_hnsw"); + b.field("m", 16); + b.field("ef_construction", 100); + b.endObject(); + } if (indexed) { b.field("similarity", "dot_product"); if (indexOptionsSet) { From 12d819a92325fddf4313cbffc68f690eaf5fd9b2 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 5 Apr 2024 14:06:21 -0700 Subject: [PATCH 063/173] Revert toolchain changes for Java 22 upgrade. (#107164) This is a follow up to #107137 to also revert the Java toolchain changes necessary to support the required toolchains. 
--- .../AdoptiumJdkToolchainResolver.java | 40 ++++++++++++++----- .../OracleOpenJdkToolchainResolver.java | 6 ++- .../AdoptiumJdkToolchainResolverSpec.groovy | 8 +++- 3 files changed, 42 insertions(+), 12 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index 89a40711c9a19..0270ee22ca8c5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.compress.utils.Lists; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainDownload; import org.gradle.jvm.toolchain.JavaToolchainRequest; @@ -20,17 +21,17 @@ import java.io.IOException; import java.net.URI; import java.net.URL; +import java.util.Comparator; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.StreamSupport; import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri; public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver { // package protected for better testing - final Map> CACHED_RELEASES = new ConcurrentHashMap<>(); + final Map> CACHED_SEMVERS = new ConcurrentHashMap<>(); @Override public Optional resolve(JavaToolchainRequest request) { @@ -38,7 +39,7 @@ public Optional resolve(JavaToolchainRequest request) { return Optional.empty(); } AdoptiumVersionRequest versionRequestKey = toVersionRequest(request); - Optional versionInfo = CACHED_RELEASES.computeIfAbsent( + Optional versionInfo = CACHED_SEMVERS.computeIfAbsent( versionRequestKey, (r) -> resolveAvailableVersion(versionRequestKey) ); @@ -53,12 +54,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) { return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion); } - private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { + private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { ObjectMapper mapper = new ObjectMapper(); try { int languageVersion = requestKey.languageVersion.asInt(); URL source = new URL( - "https://api.adoptium.net/v3/info/release_names?architecture=" + "https://api.adoptium.net/v3/info/release_versions?architecture=" + requestKey.arch + "&image_type=jdk&os=" + requestKey.platform @@ -70,8 +71,14 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK + ")" ); JsonNode jsonNode = mapper.readTree(source); - JsonNode versionsNode = jsonNode.get("releases"); - return StreamSupport.stream(versionsNode.spliterator(), false).map(JsonNode::textValue).findFirst(); + JsonNode versionsNode = jsonNode.get("versions"); + return Optional.of( + Lists.newArrayList(versionsNode.iterator()) + .stream() + .map(this::toVersionInfo) + .max(Comparator.comparing(AdoptiumVersionInfo::semver)) + .get() + ); } catch (FileNotFoundException e) { // request combo not supported (e.g. 
aarch64 + windows) return Optional.empty(); @@ -80,10 +87,21 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK } } - private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) { + private AdoptiumVersionInfo toVersionInfo(JsonNode node) { + return new AdoptiumVersionInfo( + node.get("build").asInt(), + node.get("major").asInt(), + node.get("minor").asInt(), + node.get("openjdk_version").asText(), + node.get("security").asInt(), + node.get("semver").asText() + ); + } + + private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { return URI.create( - "https://api.adoptium.net/v3/binary/version/" - + version + "https://api.adoptium.net/v3/binary/version/jdk-" + + versionInfo.semver + "/" + request.platform + "/" @@ -100,5 +118,7 @@ private boolean requestIsSupported(JavaToolchainRequest request) { return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM); } + record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {} + record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {} } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 162895fd486cf..818cb040c172e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -39,7 +39,11 @@ record JdkBuild(JavaLanguageVersion languageVersion, String version, String buil ); // package private so it can be replaced by tests - List builds = List.of(getBundledJdkBuild()); + List builds = List.of( + getBundledJdkBuild(), + // 22 release candidate + new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059") + ); private JdkBuild getBundledJdkBuild() { String bundledJdkVersion = VersionProperties.getBundledJdkVersion(); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index fe4a644ddfc1d..6383d577f027f 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,6 +11,7 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver +import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -37,7 +38,12 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) + 
resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), + 1, + 1, + "" + languageVersion.asInt() + ".1.1.1+37", + 0, "" + languageVersion.asInt() + ".1.1.1+37.1" + ))) } return resolver From 0f3ac367ac4a2f90546cfd97d76cb2d31068155b Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Sat, 6 Apr 2024 08:46:38 +0200 Subject: [PATCH 064/173] Rename values of `FailureStoreOptions` (#107062) With these new values, there's a better match between selecting failure stores in read and write operations. --- .../ml/anomaly-detection/apis/put-job.asciidoc | 2 +- .../datastreams/FailureStoreQueryParamIT.java | 10 +++++----- .../test/data_stream/200_rollover_failure_store.yml | 6 +++--- .../resources/rest-api-spec/api/indices.rollover.json | 2 +- .../elasticsearch/action/support/IndicesOptions.java | 4 ++-- .../action/admin/indices/RestRolloverIndexAction.java | 2 +- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 1ab5de76a94b0..e4e10e2ae2fc5 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -537,4 +537,4 @@ The API returns the following results: // TESTRESPONSE[s/"job_version" : "8.4.0"/"job_version" : $body.job_version/] // TESTRESPONSE[s/1656087283340/$body.$_path/] // TESTRESPONSE[s/"superuser"/"_es_test_root"/] -// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"false"/] +// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"exclude"/] diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index a6b235e8d566f..1d8de6b9ac5f6 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -75,7 +75,7 @@ public void testGetIndexApi() throws IOException { assertThat(indices.containsKey(failureStoreIndex), is(true)); } { - final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=false")); + final Response indicesResponse = client().performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "?failure_store=exclude")); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(1)); assertThat(indices.containsKey(backingIndex), is(true)); @@ -98,7 +98,7 @@ public void testGetIndexStatsApi() throws IOException { } { final Response statsResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_stats?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_stats?failure_store=include") ); Map indices = (Map) entityAsMap(statsResponse).get("indices"); assertThat(indices.size(), is(2)); @@ -124,7 +124,7 @@ public void testGetIndexSettingsApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_settings?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ 
-150,7 +150,7 @@ public void testGetIndexMappingApi() throws IOException { } { final Response indicesResponse = client().performRequest( - new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true") + new Request("GET", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include") ); Map indices = entityAsMap(indicesResponse); assertThat(indices.size(), is(2)); @@ -183,7 +183,7 @@ public void testPutIndexMappingApi() throws IOException { assertAcknowledged(client().performRequest(mappingRequest)); } { - final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=true"); + final Request mappingRequest = new Request("PUT", "/" + DATA_STREAM_NAME + "/_mapping?failure_store=include"); mappingRequest.setJsonEntity(""" { "properties": { diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 82c757fc4af76..46d46e8291ae9 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -32,7 +32,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } @@ -67,7 +67,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 @@ -96,7 +96,7 @@ setup: - do: indices.rollover: alias: "data-stream-for-rollover" - failure_store: true + target_failure_store: true body: conditions: max_docs: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index e04786ec14cf7..299c24f987d8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -64,7 +64,7 @@ "default":"false", "description":"If set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. Only allowed on data streams." 
}, - "failure_store":{ + "target_failure_store":{ "type":"boolean", "description":"If set to true, the rollover action will be applied on the failure store of the data stream.", "visibility": "feature_flag", diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 3b03b1cf0a4f6..e46a7bd5f0ec2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -417,8 +417,8 @@ public record FailureStoreOptions(boolean includeRegularIndices, boolean include ToXContentFragment { public static final String FAILURE_STORE = "failure_store"; - public static final String INCLUDE_ALL = "true"; - public static final String INCLUDE_ONLY_REGULAR_INDICES = "false"; + public static final String INCLUDE_ALL = "include"; + public static final String INCLUDE_ONLY_REGULAR_INDICES = "exclude"; public static final String INCLUDE_ONLY_FAILURE_INDICES = "only"; public static final FailureStoreOptions DEFAULT = new FailureStoreOptions(true, false); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index a0796c0f95639..98895a49fae6e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -54,7 +54,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); if (DataStream.isFailureStoreEnabled()) { - boolean failureStore = request.paramAsBoolean("failure_store", false); + boolean failureStore = request.paramAsBoolean("target_failure_store", false); if (failureStore) { rolloverIndexRequest.setIndicesOptions( IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) From 0faac52ac49f15d3dc9f3af3a2e46d3b8e5b3b00 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Sun, 7 Apr 2024 21:09:58 -0700 Subject: [PATCH 065/173] Metric for rejected indexing primary operations (per document) (#107080) * Fix number of rejected primary operations * Update docs/changelog/107080.yaml * Update test * Add metric 'es.indexing.primary_operations.document.rejections.ratio' + test * rm useless changelog * update docs * use -1 as a default version on unsupported version --- docs/reference/cluster/stats.asciidoc | 3 +- .../index/IndexingPressureIT.java | 3 +- .../metrics/NodeIndexingMetricsIT.java | 64 ++++++++++++++----- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/stats/ClusterStatsNodes.java | 5 +- .../elasticsearch/index/IndexingPressure.java | 5 +- .../index/stats/IndexingPressureStats.java | 22 ++++++- .../monitor/metrics/NodeMetrics.java | 12 ++-- .../cluster/node/stats/NodeStatsTests.java | 1 + .../cluster/stats/ClusterStatsNodesTests.java | 10 ++- .../ClusterStatsMonitoringDocTests.java | 3 +- 11 files changed, 99 insertions(+), 30 deletions(-) diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6d8a8f748fa0e..bdd3e166c22d6 100644 --- 
a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1821,7 +1821,8 @@ The API returns the following response: "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit" : "0b", "limit_in_bytes": 0 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java index 206aa57bc84b3..da89f3252bec0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java @@ -301,7 +301,8 @@ public void testWriteCanBeRejectedAtCoordinatingLevel() throws Exception { public void testWriteCanBeRejectedAtPrimaryLevel() throws Exception { final BulkRequest bulkRequest = new BulkRequest(); long totalRequestSize = 0; - for (int i = 0; i < 80; ++i) { + int numberOfIndexRequests = randomIntBetween(50, 100); + for (int i = 0; i < numberOfIndexRequests; ++i) { IndexRequest request = new IndexRequest(INDEX_NAME).id(UUIDs.base64UUID()) .source(Collections.singletonMap("key", randomAlphaOfLength(50))); totalRequestSize += request.ramBytesUsed(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 6cca0ccb3fdf3..97f052367fbc6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -164,11 +164,11 @@ public void testNodeIndexingMetricsArePublishing() throws Exception { ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); - var primaryOperationsRejectionsRatio = getRecordedMetric( + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.rejections.ratio" + "es.indexing.primary_operations.document.rejections.ratio" ); - assertThat(primaryOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + assertThat(primaryOperationsDocumentRejectionsRatio.getDouble(), equalTo(0.0)); }); @@ -207,13 +207,19 @@ public void testCoordinatingRejectionMetricsArePublishing() throws Exception { "es.indexing.coordinating_operations.rejections.total" ); assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + + var coordinatingOperationsRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.coordinating_operations.rejections.ratio" + ); + assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(1.0)); }); } - public void testPrimaryRejectionMetricsArePublishing() throws Exception { + public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception { // setting low Indexing Pressure limits to trigger primary rejections - final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB").build()); + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "2KB").build()); // setting high Indexing Pressure limits to pass coordinating checks final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( Settings.builder().put(MAX_INDEXING_BYTES.getKey(), 
"10MB").build() @@ -227,19 +233,32 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { plugin.resetMeter(); final int numberOfShards = randomIntBetween(1, 5); - assertAcked(prepareCreate("test", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-one", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)).get()); + assertAcked(prepareCreate("test-two", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); - final BulkRequest bulkRequest = new BulkRequest(); - final int batchCount = randomIntBetween(50, 100); - for (int i = 0; i < batchCount; i++) { - bulkRequest.add(new IndexRequest("test").source("field", randomAlphaOfLength(2048))); + final BulkRequest bulkRequestOne = new BulkRequest(); + final int batchCountOne = randomIntBetween(50, 100); + for (int i = 0; i < batchCountOne; i++) { + bulkRequestOne.add(new IndexRequest("test-one").source("field", randomAlphaOfLength(3096))); } - // big batch should pass thru coordinating limit check but fail on primary - // note the bulk request is sent to coordinating client - final BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet(); - assertThat(bulkResponse.hasFailures(), equalTo(true)); - assertThat(Arrays.stream(bulkResponse.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), equalTo(true)); + final BulkRequest bulkRequestTwo = new BulkRequest(); + final int batchCountTwo = randomIntBetween(1, 5); + for (int i = 0; i < batchCountTwo; i++) { + bulkRequestTwo.add(new IndexRequest("test-two").source("field", randomAlphaOfLength(1))); + } + + // big batch should pass through coordinating gate but trip on primary gate + // note the bulk request is sent to coordinating node + final BulkResponse bulkResponseOne = client(coordinatingNode).bulk(bulkRequestOne).actionGet(); + assertThat(bulkResponseOne.hasFailures(), equalTo(true)); + assertThat( + Arrays.stream(bulkResponseOne.getItems()).allMatch(item -> item.status() == RestStatus.TOO_MANY_REQUESTS), + equalTo(true) + ); + // small bulk request is expected to pass through primary indexing pressure gate + final BulkResponse bulkResponseTwo = client(coordinatingNode).bulk(bulkRequestTwo).actionGet(); + assertThat(bulkResponseTwo.hasFailures(), equalTo(false)); // simulate async apm `polling` call for metrics plugin.collect(); @@ -251,6 +270,16 @@ public void testPrimaryRejectionMetricsArePublishing() throws Exception { "es.indexing.primary_operations.rejections.total" ); assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); + + var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( + plugin::getDoubleGaugeMeasurement, + "es.indexing.primary_operations.document.rejections.ratio" + ); + // ratio of rejected documents vs all indexing documents + assertThat( + equals(primaryOperationsDocumentRejectionsRatio.getDouble(), (double) batchCountOne / (batchCountOne + batchCountTwo)), + equalTo(true) + ); }); } @@ -261,4 +290,9 @@ private static Measurement getRecordedMetric(Function> assertThat(measurements.size(), equalTo(1)); return measurements.get(0); } + + private static boolean equals(double expected, double actual) { + final double eps = .0000001; + return Math.abs(expected - actual) < eps; + } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 57a3afe083707..4a1bf691ea1b0 
100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,7 @@ static TransportVersion def(int id) { public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0); public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0); public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); + public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c421cdefbdbf4..6ffe7ac390260 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -782,6 +782,7 @@ static class IndexPressureStats implements ToXContentFragment { long coordinatingRejections = 0; long primaryRejections = 0; long replicaRejections = 0; + long primaryDocumentRejections = 0; long memoryLimit = 0; long totalCoordinatingOps = 0; @@ -811,6 +812,7 @@ static class IndexPressureStats implements ToXContentFragment { currentCoordinatingOps += nodeStatIndexingPressureStats.getCurrentCoordinatingOps(); currentPrimaryOps += nodeStatIndexingPressureStats.getCurrentPrimaryOps(); currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); + primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); } } indexingPressureStats = new IndexingPressureStats( @@ -831,7 +833,8 @@ static class IndexPressureStats implements ToXContentFragment { totalReplicaOps, currentCoordinatingOps, currentPrimaryOps, - currentReplicaOps + currentReplicaOps, + primaryDocumentRejections ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index d0bc8ad980dde..7696cf99b75cd 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -52,6 +52,7 @@ public class IndexingPressure { private final AtomicLong coordinatingRejections = new AtomicLong(0); private final AtomicLong primaryRejections = new AtomicLong(0); private final AtomicLong replicaRejections = new AtomicLong(0); + private final AtomicLong primaryDocumentRejections = new AtomicLong(0); private final long primaryAndCoordinatingLimits; private final long replicaLimits; @@ -136,6 +137,7 @@ public Releasable markPrimaryOperationStarted(int operations, long bytes, boolea long totalBytesWithoutOperation = totalBytes - bytes; this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); this.primaryRejections.getAndIncrement(); + this.primaryDocumentRejections.addAndGet(operations); throw new EsRejectedExecutionException( "rejected execution of primary operation [" + "coordinating_and_primary_bytes=" @@ -218,7 +220,8 @@ public IndexingPressureStats stats() { totalReplicaOps.get(), currentCoordinatingOps.get(), currentPrimaryOps.get(), - currentReplicaOps.get() + currentReplicaOps.get(), + primaryDocumentRejections.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index 
81eb40e6f6f61..608fa3128bf09 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -32,6 +32,7 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long coordinatingRejections; private final long primaryRejections; private final long replicaRejections; + private final long primaryDocumentRejections; private final long memoryLimit; // These fields will be used for additional back-pressure and metrics in the future @@ -70,6 +71,12 @@ public IndexingPressureStats(StreamInput in) throws IOException { this.currentCoordinatingOps = 0; this.currentPrimaryOps = 0; this.currentReplicaOps = 0; + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + primaryDocumentRejections = in.readVLong(); + } else { + primaryDocumentRejections = -1L; + } } public IndexingPressureStats( @@ -90,7 +97,8 @@ public IndexingPressureStats( long totalReplicaOps, long currentCoordinatingOps, long currentPrimaryOps, - long currentReplicaOps + long currentReplicaOps, + long primaryDocumentRejections ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -111,6 +119,8 @@ public IndexingPressureStats( this.currentCoordinatingOps = currentCoordinatingOps; this.currentPrimaryOps = currentPrimaryOps; this.currentReplicaOps = currentReplicaOps; + + this.primaryDocumentRejections = primaryDocumentRejections; } @Override @@ -132,6 +142,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { out.writeVLong(memoryLimit); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { + out.writeVLong(primaryDocumentRejections); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -206,6 +220,10 @@ public long getMemoryLimit() { return memoryLimit; } + public long getPrimaryDocumentRejections() { + return primaryDocumentRejections; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; @@ -219,6 +237,7 @@ public long getMemoryLimit() { private static final String COORDINATING_REJECTIONS = "coordinating_rejections"; private static final String PRIMARY_REJECTIONS = "primary_rejections"; private static final String REPLICA_REJECTIONS = "replica_rejections"; + private static final String PRIMARY_DOCUMENT_REJECTIONS = "primary_document_rejections"; private static final String LIMIT = "limit"; private static final String LIMIT_IN_BYTES = "limit_in_bytes"; @@ -246,6 +265,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(COORDINATING_REJECTIONS, coordinatingRejections); builder.field(PRIMARY_REJECTIONS, primaryRejections); builder.field(REPLICA_REJECTIONS, replicaRejections); + builder.field(PRIMARY_DOCUMENT_REJECTIONS, primaryDocumentRejections); builder.endObject(); builder.humanReadableField(LIMIT_IN_BYTES, LIMIT, ByteSizeValue.ofBytes(memoryLimit)); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 527acb8d4fcbc..e689898b05da6 
100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -621,7 +621,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerDoubleGauge( - "es.indexing.primary_operations.rejections.ratio", + "es.indexing.primary_operations.document.rejections.ratio", "Ratio of rejected primary operations", "ratio", () -> { @@ -629,13 +629,13 @@ private void registerAsyncMetrics(MeterRegistry registry) { .map(NodeStats::getIndexingPressureStats) .map(IndexingPressureStats::getTotalPrimaryOps) .orElse(0L); - var totalPrimaryRejections = Optional.ofNullable(stats.getOrRefresh()) + var totalPrimaryDocumentRejections = Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getPrimaryRejections) + .map(IndexingPressureStats::getPrimaryDocumentRejections) .orElse(0L); - // rejections do not count towards `totalPrimaryOperations` - var totalOps = totalPrimaryOperations + totalPrimaryRejections; - return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryRejections / totalOps : 0.0); + // primary document rejections do not count towards `totalPrimaryOperations` + var totalOps = totalPrimaryOperations + totalPrimaryDocumentRejections; + return new DoubleWithAttributes(totalOps != 0 ? (double) totalPrimaryDocumentRejections / totalOps : 0.0); } ) ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e4b821fba7634..b91ea304c5da6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1038,6 +1038,7 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 0ee3b244ecf45..adba547f9b2ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -113,7 +113,7 @@ public void testIndexPressureStats() throws Exception { randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats), randomValueOtherThanMany(n -> n.getIndexingPressureStats() == null, NodeStatsTests::createNodeStats) ); - long[] expectedStats = new long[12]; + long[] expectedStats = new long[13]; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats indexingPressureStats = nodeStat.getIndexingPressureStats(); if (indexingPressureStats != null) { @@ -130,8 +130,9 @@ public void testIndexPressureStats() throws Exception { expectedStats[8] += indexingPressureStats.getCoordinatingRejections(); expectedStats[9] += indexingPressureStats.getPrimaryRejections(); expectedStats[10] += indexingPressureStats.getReplicaRejections(); + expectedStats[11] += indexingPressureStats.getPrimaryDocumentRejections(); - expectedStats[11] += indexingPressureStats.getMemoryLimit(); + expectedStats[12] += 
indexingPressureStats.getMemoryLimit(); } } @@ -181,9 +182,12 @@ public void testIndexPressureStats() throws Exception { + "," + "\"replica_rejections\":" + expectedStats[10] + + "," + + "\"primary_document_rejections\":" + + expectedStats[11] + "}," + "\"limit_in_bytes\":" - + expectedStats[11] + + expectedStats[12] + "}" + "}}" ) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index cb270c7f19ae8..2c5485b8d467f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -730,7 +730,8 @@ public void testToXContent() throws IOException { "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit_in_bytes": 0 } From 29888ff7ef815f543b8e6f2650746182dec8346d Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Mon, 8 Apr 2024 08:09:35 +0200 Subject: [PATCH 066/173] [Profiling] Fix test assumption re. serialization (#107134) With this commit we use the same XContent API (`rawValue`) in the test as in the actual code in order to ensure the test expects the correct value (i.e. this was a test bug). Closes #107117 --- .../elasticsearch/xpack/profiling/TopNFunctionTests.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java index 3a91550767094..f30fd18443550 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java @@ -22,7 +22,6 @@ public class TopNFunctionTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107117") public void testToXContent() throws IOException { String fileID = "6tVKI4mSYDEJ-ABAIpYXcg"; int frameType = 1; @@ -56,8 +55,10 @@ public void testToXContent() throws IOException { .rawValue("2.2000") .field("total_annual_co2_tons") .rawValue("22.0000") - .field("self_annual_costs_usd", "12.0000") - .field("total_annual_costs_usd", "120.0000") + .field("self_annual_costs_usd") + .rawValue("12.0000") + .field("total_annual_costs_usd") + .rawValue("120.0000") .endObject(); XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); From f66ca2d697a3a6c958cec20883ef696ade7311b1 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 8 Apr 2024 11:26:24 +0300 Subject: [PATCH 067/173] Introduce new node feature for renaming health endpoint (#107154) The health API was available for experimentation under the [`_internal/_health`](https://www.elastic.co/guide/en/elasticsearch/reference/8.6/health-api.html) before it [became GA](https://www.elastic.co/guide/en/elasticsearch/reference/8.7/health-api.html) at `8.7.0`. For this reason we introduce another node feature to capture this change. 
Fixes https://github.com/elastic/elasticsearch/issues/106933 --- .../elasticsearch/upgrades/HealthNodeUpgradeIT.java | 5 ++--- .../java/org/elasticsearch/health/HealthFeatures.java | 10 +++++++++- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 695f5d2a64bc7..0f210ee4b2450 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.hamcrest.Matchers; @@ -21,7 +20,6 @@ import static org.hamcrest.CoreMatchers.equalTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106933") public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { @@ -36,7 +34,8 @@ public void testHealthNode() throws Exception { assertThat(tasks, Matchers.containsString("health-node")); }); assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_health_report")); + String path = clusterHasFeature("health.supports_health_report_api") ? "_health_report" : "_internal/_health"; + Response response = client().performRequest(new Request("GET", path)); Map health_report = entityAsMap(response.getEntity()); assertThat(health_report.get("status"), equalTo("green")); }); diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 4b3bcf7e7278f..32e6c8f5ca849 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -18,6 +18,7 @@ public class HealthFeatures implements FeatureSpecification { public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); + public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @@ -28,6 +29,13 @@ public Set getFeatures() { @Override public Map getHistoricalFeatures() { - return Map.of(SUPPORTS_HEALTH, Version.V_8_5_0, SUPPORTS_SHARDS_CAPACITY_INDICATOR, Version.V_8_8_0); + return Map.of( + SUPPORTS_HEALTH, + Version.V_8_5_0, // health accessible via /_internal/_health + SUPPORTS_HEALTH_REPORT_API, + Version.V_8_7_0, // health accessible via /_health_report + SUPPORTS_SHARDS_CAPACITY_INDICATOR, + Version.V_8_8_0 + ); } } From 8ca6f506399eab246ceddc6bace404ee28491354 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Apr 2024 11:19:25 +0200 Subject: [PATCH 068/173] Remove unused buildShardSearchRequest from SearchPhaseContext (#107180) This method isn't used on the interface, it's only used as part of the abstract async action. 
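Before the diff below, the refactor shape in one hypothetical, minimal example (all names invented): a method that only the abstract base class ever calls is removed from the interface and kept as a protected final member of the base, so the public contract shrinks and mocks no longer need a fail-fast stub for it.

interface PhaseContext {
    void executeNextPhase();
}

abstract class AbstractAsyncAction implements PhaseContext {
    // Demoted from the interface: now an implementation detail of the base class.
    protected final String buildRequest(int shardIndex) {
        return "shard-request-" + shardIndex;
    }

    final void start() {
        String request = buildRequest(0);
        // ... dispatch the request to the shard, then advance the phase:
        executeNextPhase();
    }
}

This mirrors the change below, where buildShardSearchRequest becomes protected final on AbstractSearchAsyncAction and the stub in MockSearchPhaseContext is deleted.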
--- .../action/search/AbstractSearchAsyncAction.java | 10 ++++++++-- .../action/search/SearchPhaseContext.java | 10 ---------- .../action/search/MockSearchPhaseContext.java | 7 ------- 3 files changed, 8 insertions(+), 19 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 1da114adb34f6..1f8470b3bcd01 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -769,8 +769,14 @@ public final void onFailure(Exception e) { listener.onFailure(e); } - @Override - public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { + /** + * Builds an request for the initial search phase. + * + * @param shardIt the target {@link SearchShardIterator} + * @param shardIndex the index of the shard that is used in the coordinator node to + * tiebreak results with identical sort values + */ + protected final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index af9bcac8e3a33..c77c7e58efc7d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -15,7 +15,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import java.util.concurrent.Executor; @@ -115,15 +114,6 @@ default void sendReleaseSearchContext( } } - /** - * Builds an request for the initial search phase. - * - * @param shardIt the target {@link SearchShardIterator} - * @param shardIndex the index of the shard that is used in the coordinator node to - * tiebreak results with identical sort values - */ - ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex); - /** * Processes the phase transition from on phase to another. This method handles all errors that happen during the initial run execution * of the next phase. 
If there are no successful operations in the context when this method is executed the search is aborted and diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index ed807091ae9a2..8bfd61b8d5b32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ -127,12 +126,6 @@ public SearchTransportService getSearchTransport() { return searchTransport; } - @Override - public ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt, int shardIndex) { - Assert.fail("should not be called"); - return null; - } - @Override public void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPhase) { try { From 88306379ba3f6728af272886c0a953ef08df2fcf Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 8 Apr 2024 11:28:12 +0200 Subject: [PATCH 069/173] Increase KDF iteration count in `KeyStoreWrapper` (#107107) This PR increases the KDF iteration count for the keystore password. Additional context in ES-8063. --- .../cli/keystore/KeyStoreWrapperTests.java | 20 ++++++++++++++ .../keystore/UpgradeKeyStoreCommandTests.java | 22 +++++++++++++-- ...at-v5-with-password-elasticsearch.keystore | Bin 0 -> 199 bytes docs/changelog/107107.yaml | 5 ++++ .../common/settings/KeyStoreWrapper.java | 25 +++++++++++------- 5 files changed, 60 insertions(+), 12 deletions(-) create mode 100644 distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore create mode 100644 docs/changelog/107107.yaml diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index f6e3578811688..3004494262e6b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -457,6 +457,26 @@ public void testLegacyV3() throws GeneralSecurityException, IOException { assertThat(toByteArray(wrapper.getFile("file_setting")), equalTo("file_value".getBytes(StandardCharsets.UTF_8))); } + public void testLegacyV5() throws GeneralSecurityException, IOException { + final Path configDir = createTempDir(); + final Path keystore = configDir.resolve("elasticsearch.keystore"); + try ( + InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v5-with-password-elasticsearch.keystore"); + OutputStream os = Files.newOutputStream(keystore) + ) { + final byte[] buffer = new byte[4096]; + int readBytes; + while ((readBytes = is.read(buffer)) > 0) { + os.write(buffer, 0, readBytes); + } + } + final KeyStoreWrapper wrapper = KeyStoreWrapper.load(configDir); + assertNotNull(wrapper); + wrapper.decrypt("keystorepassword".toCharArray()); + assertThat(wrapper.getFormatVersion(), equalTo(5)); + assertThat(wrapper.getSettingNames(), equalTo(Set.of("keystore.seed"))); + } + public void testSerializationNewlyCreated() 
throws Exception { final KeyStoreWrapper wrapper = KeyStoreWrapper.create(); wrapper.setString("string_setting", "string_value".toCharArray()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index ae19fa0b94b83..979b118a887e5 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import java.io.InputStream; @@ -46,8 +47,20 @@ public void testKeystoreUpgradeV4() throws Exception { assertKeystoreUpgrade("/format-v4-elasticsearch.keystore", KeyStoreWrapper.V4_VERSION); } + public void testKeystoreUpgradeV5() throws Exception { + assertKeystoreUpgradeWithPassword("/format-v5-with-password-elasticsearch.keystore", KeyStoreWrapper.LE_VERSION); + } + private void assertKeystoreUpgrade(String file, int version) throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); + assertKeystoreUpgrade(file, version, null); + } + + private void assertKeystoreUpgradeWithPassword(String file, int version) throws Exception { + assertKeystoreUpgrade(file, version, "keystorepassword"); + } + + private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); @@ -56,11 +69,17 @@ private void assertKeystoreUpgrade(String file, int version) throws Exception { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } + if (password != null) { + terminal.addSecretInput(password); + terminal.addSecretInput(password); + } execute(); + terminal.reset(); + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); - afterUpgrade.decrypt(new char[0]); + afterUpgrade.decrypt(password != null ? 
password.toCharArray() : new char[0]); assertThat(afterUpgrade.getSettingNames(), hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); } } @@ -69,5 +88,4 @@ public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); } - } diff --git a/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore new file mode 100644 index 0000000000000000000000000000000000000000..0547db46eb1ef2f8270b72b34cb2551585a971b6 GIT binary patch literal 199 zcmcD&o+B=nnv+;ul9^nbnpl*ap_iRnSzMA|l*+)sz{)roNIC%Vlj)|G46`OpP@B2w z_KHsPxC3n(<+YD~A5612T9q0nn)0a5Cv@|t#hKFe@qeS!?|(gOZ*$|%CZ5CA-C3OP zRyD5Y0h(d+LG_z3SB1L4BOYFBAiq%YRFpH%x+CHWwQh11R(sC5C}kHf*vM(LV2R?~ wFcXK0c>S=C)(e(Q;$QdLq3inrey+=A@?9^ZnHXOm(3t!JXaX3-1vqa40Q;Rzng9R* literal 0 HcmV?d00001 diff --git a/docs/changelog/107107.yaml b/docs/changelog/107107.yaml new file mode 100644 index 0000000000000..5ca611befeb5d --- /dev/null +++ b/docs/changelog/107107.yaml @@ -0,0 +1,5 @@ +pr: 107107 +summary: Increase KDF iteration count in `KeyStoreWrapper` +area: Infra/CLI +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 6bdec2380c344..276775a868665 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -114,19 +114,18 @@ public void writeTo(StreamOutput out) throws IOException { /** The oldest metadata format version that can be read. */ private static final int MIN_FORMAT_VERSION = 3; - /** Legacy versions of the metadata written before the keystore data. */ - public static final int V2_VERSION = 2; public static final int V3_VERSION = 3; public static final int V4_VERSION = 4; /** The version where lucene directory API changed from BE to LE. */ public static final int LE_VERSION = 5; - public static final int CURRENT_VERSION = LE_VERSION; + public static final int HIGHER_KDF_ITERATION_COUNT_VERSION = 6; + public static final int CURRENT_VERSION = HIGHER_KDF_ITERATION_COUNT_VERSION; /** The algorithm used to derive the cipher key from a password. */ private static final String KDF_ALGO = "PBKDF2WithHmacSHA512"; /** The number of iterations to derive the cipher key. */ - private static final int KDF_ITERS = 10000; + private static final int KDF_ITERS = 210000; /** * The number of bits for the cipher key. @@ -155,6 +154,7 @@ public void writeTo(StreamOutput out) throws IOException { // 3: FIPS compliant algos, ES 6.3 // 4: remove distinction between string/files, ES 6.8/7.1 // 5: Lucene directory API changed to LE, ES 8.0 + // 6: increase KDF iteration count, ES 8.14 /** The metadata format version used to read the current keystore wrapper. 
*/ private final int formatVersion; @@ -317,8 +317,8 @@ public boolean hasPassword() { return hasPassword; } - private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException { - PBEKeySpec keySpec = new PBEKeySpec(password, salt, KDF_ITERS, CIPHER_KEY_BITS); + private static Cipher createCipher(int opmode, char[] password, byte[] salt, byte[] iv, int kdfIters) throws GeneralSecurityException { + PBEKeySpec keySpec = new PBEKeySpec(password, salt, kdfIters, CIPHER_KEY_BITS); SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(KDF_ALGO); SecretKey secretKey; try { @@ -337,6 +337,11 @@ private static Cipher createCipher(int opmode, char[] password, byte[] salt, byt return cipher; } + private static int getKdfIterationCountForVersion(int formatVersion) { + // iteration count was increased in version 6; it was 10,000 in previous versions + return formatVersion < HIGHER_KDF_ITERATION_COUNT_VERSION ? 10000 : KDF_ITERS; + } + /** * Decrypts the underlying keystore data. * @@ -365,7 +370,7 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio throw new SecurityException("Keystore has been corrupted or tampered with", e); } - Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.DECRYPT_MODE, password, salt, iv, getKdfIterationCountForVersion(formatVersion)); try ( ByteArrayInputStream bytesStream = new ByteArrayInputStream(encryptedBytes); CipherInputStream cipherStream = new CipherInputStream(bytesStream, cipher); @@ -403,11 +408,11 @@ private static byte[] readByteArray(DataInput input) throws IOException { } /** Encrypt the keystore entries and return the encrypted data. */ - private byte[] encrypt(char[] password, byte[] salt, byte[] iv) throws GeneralSecurityException, IOException { + private byte[] encrypt(char[] password, byte[] salt, byte[] iv, int kdfIterationCount) throws GeneralSecurityException, IOException { assert isLoaded(); ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv); + Cipher cipher = createCipher(Cipher.ENCRYPT_MODE, password, salt, iv, kdfIterationCount); try ( CipherOutputStream cipherStream = new CipherOutputStream(bytes, cipher); DataOutputStream output = new DataOutputStream(cipherStream) @@ -450,7 +455,7 @@ public synchronized void save(Path configDir, char[] password, boolean preserveP byte[] iv = new byte[12]; random.nextBytes(iv); // encrypted data - byte[] encryptedBytes = encrypt(password, salt, iv); + byte[] encryptedBytes = encrypt(password, salt, iv, getKdfIterationCountForVersion(CURRENT_VERSION)); // size of data block output.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); From 887d48dfc290a61229311899041234b64a8c8066 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Mon, 8 Apr 2024 12:06:08 +0200 Subject: [PATCH 070/173] Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag (#107122) A lot of places in the code use a `DataStream` constructor that sets the `rolloverOnWrite` flag to `false`. For some places, this was intentional, but for others, this was erroneous (and for most tests, it didn't matter much). This PR fixes the erroneous spots and avoids similar unintentional behavior in the future by removing the constructor in question altogether. 
Most use cases just want to copy the flag over and if you _do_ want to set the flag to false, it makes more sense to do so explicitly yourself rather than letting the constructor do it for you. An additional small bonus is that we have one less constructor for the `DataStream` class :). Follow up of [this](https://github.com/elastic/elasticsearch/pull/107035#discussion_r1549299287) discussion. --- docs/changelog/107122.yaml | 5 ++ .../datastreams/DataStreamIT.java | 3 +- .../DataStreamIndexSettingsProviderTests.java | 3 +- .../UpdateTimeSeriesRangeServiceTests.java | 3 +- .../action/GetDataStreamsResponseTests.java | 2 + .../DataStreamLifecycleServiceTests.java | 3 +- .../cluster/metadata/DataStream.java | 58 ++++++++----------- .../MetadataCreateDataStreamService.java | 1 + .../metadata/MetadataDataStreamsService.java | 1 + .../snapshots/RestoreService.java | 1 + .../DataStreamAutoShardingServiceTests.java | 1 + .../cluster/metadata/DataStreamTests.java | 47 ++++++++++----- .../MetadataDataStreamsServiceTests.java | 1 + .../WildcardExpressionResolverTests.java | 1 + .../metadata/DataStreamTestHelper.java | 7 ++- .../ccr/action/TransportPutFollowAction.java | 4 ++ ...StreamLifecycleUsageTransportActionIT.java | 4 +- 17 files changed, 88 insertions(+), 57 deletions(-) create mode 100644 docs/changelog/107122.yaml diff --git a/docs/changelog/107122.yaml b/docs/changelog/107122.yaml new file mode 100644 index 0000000000000..e227bfd45b939 --- /dev/null +++ b/docs/changelog/107122.yaml @@ -0,0 +1,5 @@ +pr: 107122 +summary: Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag +area: Data streams +type: bug +issues: [] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 6c06511ccfbd1..a0a391a0f019b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1791,7 +1791,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), - null + original.rolloverOnWrite(), + original.getAutoShardingEvent() ); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 01ad1bb09b20f..11446a2a2a761 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -315,7 +315,8 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), - null + ds.rolloverOnWrite(), + ds.getAutoShardingEvent() ) ); Metadata metadata = mb.build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index abd5132edde16..1c63deadf92a4 100644 --- 
a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -154,7 +154,8 @@ public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { d.getLifecycle(), d.isFailureStore(), d.getFailureIndices(), - null + false, + d.getAutoShardingEvent() ) ) .build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 2118c98b377bc..9fc646995bc0e 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -89,6 +89,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(), true, failureStores, + false, null ); @@ -199,6 +200,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti new DataStreamLifecycle(null, null, false), true, failureStores, + false, null ); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index d0456d669663d..a67fa72cb3079 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -296,7 +296,8 @@ public void testRetentionNotExecutedForTSIndicesWithinTimeBounds() { DataStreamLifecycle.newBuilder().dataRetention(0L).build(), dataStream.isFailureStore(), dataStream.getFailureIndices(), - null + dataStream.rolloverOnWrite(), + dataStream.getAutoShardingEvent() ) ); clusterState = ClusterState.builder(clusterState).metadata(builder).build(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 57ab7c431f7ea..364a1b31ceeba 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -119,40 +119,6 @@ public static boolean isFailureStoreEnabled() { @Nullable private final DataStreamAutoShardingEvent autoShardingEvent; - public DataStream( - String name, - List indices, - long generation, - Map metadata, - boolean hidden, - boolean replicated, - boolean system, - boolean allowCustomRouting, - IndexMode indexMode, - DataStreamLifecycle lifecycle, - boolean failureStore, - List failureIndices, - @Nullable DataStreamAutoShardingEvent autoShardingEvent - ) { - this( - name, - indices, - generation, - metadata, - hidden, - replicated, - system, - System::currentTimeMillis, - allowCustomRouting, - indexMode, - lifecycle, - failureStore, - failureIndices, - false, - autoShardingEvent - ); - } - public DataStream( String name, List indices, @@ -222,6 +188,7 @@ public DataStream( this.failureStore = failureStore; this.failureIndices = failureIndices; assert assertConsistent(this.indices); + assert replicated == false || rolloverOnWrite == false : "replicated data streams cannot be marked for lazy rollover"; 
this.rolloverOnWrite = rolloverOnWrite; this.autoShardingEvent = autoShardingEvent; } @@ -238,7 +205,22 @@ public DataStream( boolean allowCustomRouting, IndexMode indexMode ) { - this(name, indices, generation, metadata, hidden, replicated, system, allowCustomRouting, indexMode, null, false, List.of(), null); + this( + name, + indices, + generation, + metadata, + hidden, + replicated, + system, + allowCustomRouting, + indexMode, + null, + false, + List.of(), + false, + null + ); } private static boolean assertConsistent(List indices) { @@ -507,6 +489,7 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -544,6 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) lifecycle, failureStore, failureIndices, + false, autoShardingEvent ); } @@ -646,6 +630,7 @@ public DataStream removeBackingIndex(Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -692,6 +677,7 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -753,6 +739,7 @@ public DataStream addBackingIndex(Metadata clusterMetadata, Index index) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -810,6 +797,7 @@ public DataStream snapshot(Collection indicesInSnapshot) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 6d0b424cad8f2..3c3ff0d130f0a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -339,6 +339,7 @@ static ClusterState createDataStream( lifecycle == null && isDslOnlyMode ? 
DataStreamLifecycle.DEFAULT : lifecycle, template.getDataStreamTemplate().hasFailureStore(), failureIndices, + false, null ); Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(newDataStream); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 4006bc8d1a94a..c03d40984e11c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -213,6 +213,7 @@ static ClusterState updateDataLifecycle( lifecycle, dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ) ); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 4b6e3f30fe6fa..a597901d4600e 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -717,6 +717,7 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad dataStream.getLifecycle(), dataStream.isFailureStore(), dataStream.getFailureIndices(), + dataStream.rolloverOnWrite(), dataStream.getAutoShardingEvent() ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 0d1104279d3ce..70e291afcaf32 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -794,6 +794,7 @@ private DataStream createDataStream( null, false, List.of(), + false, autoShardingEvent ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 3187a3e391691..f086b52c1b491 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -96,8 +96,9 @@ protected DataStream mutateInstance(DataStream instance) { var lifecycle = instance.getLifecycle(); var failureStore = instance.isFailureStore(); var failureIndices = instance.getFailureIndices(); + var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - switch (between(0, 11)) { + switch (between(0, 12)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); case 2 -> generation = instance.getGeneration() + randomIntBetween(1, 10); @@ -110,7 +111,11 @@ protected DataStream mutateInstance(DataStream instance) { isHidden = true; } } - case 5 -> isReplicated = isReplicated == false; + case 5 -> { + isReplicated = isReplicated == false; + // Replicated data streams cannot be marked for lazy rollover. 
+ rolloverOnWrite = isReplicated == false && rolloverOnWrite; + } case 6 -> { if (isSystem == false) { isSystem = true; @@ -131,6 +136,10 @@ protected DataStream mutateInstance(DataStream instance) { failureStore = failureIndices.isEmpty() == false; } case 11 -> { + rolloverOnWrite = rolloverOnWrite == false; + isReplicated = rolloverOnWrite == false && isReplicated; + } + case 12 -> { autoShardingEvent = randomBoolean() && autoShardingEvent != null ? null : new DataStreamAutoShardingEvent( @@ -154,6 +163,7 @@ protected DataStream mutateInstance(DataStream instance) { lifecycle, failureStore, failureIndices, + rolloverOnWrite, autoShardingEvent ); } @@ -212,6 +222,7 @@ public void testRolloverUpgradeToTsdbDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -240,6 +251,7 @@ public void testRolloverDowngradeToRegularDataStream() { ds.getLifecycle(), ds.isFailureStore(), ds.getFailureIndices(), + ds.rolloverOnWrite(), ds.getAutoShardingEvent() ); var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); @@ -616,19 +628,21 @@ public void testSnapshot() { postSnapshotIndices.removeAll(indicesToRemove); postSnapshotIndices.addAll(indicesToAdd); + var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = new DataStream( preSnapshotDataStream.getName(), postSnapshotIndices, preSnapshotDataStream.getGeneration() + randomIntBetween(0, 5), preSnapshotDataStream.getMetadata() == null ? null : new HashMap<>(preSnapshotDataStream.getMetadata()), preSnapshotDataStream.isHidden(), - preSnapshotDataStream.isReplicated() && randomBoolean(), + replicated, preSnapshotDataStream.isSystem(), preSnapshotDataStream.isAllowCustomRouting(), preSnapshotDataStream.getIndexMode(), preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + replicated == false && preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -670,6 +684,7 @@ public void testSnapshotWithAllBackingIndicesRemoved() { preSnapshotDataStream.getLifecycle(), preSnapshotDataStream.isFailureStore(), preSnapshotDataStream.getFailureIndices(), + preSnapshotDataStream.rolloverOnWrite(), preSnapshotDataStream.getAutoShardingEvent() ); @@ -1896,13 +1911,14 @@ private IndexMetadata createIndexMetadata(String indexName, IndexWriteLoad index public void testWriteFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), randomNonEmptyIndexInstances(), randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1910,7 +1926,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(noFailureStoreDataStream.getFailureStoreWriteIndex(), nullValue()); @@ -1921,7 +1937,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1929,7 +1945,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && 
randomBoolean(), null ); assertThat(failureStoreDataStreamWithEmptyFailureIndices.getFailureStoreWriteIndex(), nullValue()); @@ -1947,7 +1963,7 @@ public void testWriteFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1955,7 +1971,7 @@ public void testWriteFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.getFailureStoreWriteIndex(), is(writeFailureIndex)); @@ -1965,13 +1981,14 @@ public void testIsFailureIndex() { boolean hidden = randomBoolean(); boolean system = hidden && randomBoolean(); List backingIndices = randomNonEmptyIndexInstances(); + boolean replicated = randomBoolean(); DataStream noFailureStoreDataStream = new DataStream( randomAlphaOfLength(10), backingIndices, randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -1979,7 +1996,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), false, null, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -1994,7 +2011,7 @@ public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2002,7 +2019,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, List.of(), - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat( @@ -2026,7 +2043,7 @@ public void testIsFailureIndex() { randomNonNegativeInt(), null, hidden, - randomBoolean(), + replicated, system, System::currentTimeMillis, randomBoolean(), @@ -2034,7 +2051,7 @@ public void testIsFailureIndex() { DataStreamLifecycleTests.randomLifecycle(), true, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), null ); assertThat(failureStoreDataStream.isFailureStoreIndex(writeFailureIndex.getName()), is(true)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 71306d7fe0aef..1fe1f6350445c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -357,6 +357,7 @@ public void testRemoveBrokenBackingIndexReference() { original.getLifecycle(), original.isFailureStore(), original.getFailureIndices(), + original.rolloverOnWrite(), original.getAutoShardingEvent() ); var brokenState = ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(broken).build()).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 9980e1b27e48c..c7a30e3eae548 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -381,6 +381,7 @@ public void testAllDataStreams() { null, false, List.of(), + false, null ); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java 
b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 2980b8a48636a..c83caa617e16e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -143,6 +143,7 @@ public static DataStream newInstance( lifecycle, false, List.of(), + false, autoShardingEvent ); } @@ -169,6 +170,7 @@ public static DataStream newInstance( lifecycle, failureStores.size() > 0, failureStores, + false, null ); } @@ -352,13 +354,14 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time ); } + boolean replicated = randomBoolean(); return new DataStream( dataStreamName, indices, generation, metadata, randomBoolean(), - randomBoolean(), + replicated, false, // Some tests don't work well with system data streams, since these data streams require special handling timeProvider, randomBoolean(), @@ -366,7 +369,7 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time randomBoolean() ? DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() : null, failureStore, failureIndices, - randomBoolean(), + replicated == false && randomBoolean(), randomBoolean() ? new DataStreamAutoShardingEvent( indices.get(indices.size() - 1).getName(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 446e9abcd3e26..b3c059e933fcf 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -341,6 +341,9 @@ static DataStream updateLocalDataStream( remoteDataStream.getLifecycle(), remoteDataStream.isFailureStore(), remoteDataStream.getFailureIndices(), + // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense + // (and potentially even break things). 
+ false, remoteDataStream.getAutoShardingEvent() ); } else { @@ -395,6 +398,7 @@ static DataStream updateLocalDataStream( localDataStream.getLifecycle(), localDataStream.isFailureStore(), localDataStream.getFailureIndices(), + localDataStream.rolloverOnWrite(), localDataStream.getAutoShardingEvent() ); } } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java index bc97623c76970..c1b4a4bf27890 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/action/DataStreamLifecycleUsageTransportActionIT.java @@ -122,19 +122,21 @@ public void testAction() throws Exception { indices.add(index); } boolean systemDataStream = randomBoolean(); + boolean replicated = randomBoolean(); DataStream dataStream = new DataStream( randomAlphaOfLength(50), indices, randomLongBetween(0, 1000), Map.of(), systemDataStream || randomBoolean(), - randomBoolean(), + replicated, systemDataStream, randomBoolean(), IndexMode.STANDARD, lifecycle, false, List.of(), + replicated == false && randomBoolean(), null ); dataStreamMap.put(dataStream.getName(), dataStream); From a7b38394d9f12dcb930616f0162a11c5cad68961 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 8 Apr 2024 12:26:26 +0200 Subject: [PATCH 071/173] ESQL: Support ST_DISJOINT (#107007) * WIP Started developing ST_DISJOINT. Initially based on ST_INTERSECTS * Fix functions list and add spatial point integration tests * Update docs/changelog/107007.yaml * More tests for shapes and cartesian-multigeoms * Some more tests to highlight issues with DISJOINT on cartesian point indices * Disable Lucene push-down for DISJOINT on cartesian point indices * Added docs for ST_DISJOINT * Support DISJOINT in the lucene-pushdown code for cartesian point indexes * Re-enable push-to-source for DISJOINT on cartesian_point indices * Fix docs example * Try fix internal docs links which are not being rendered * Fixed disjoint on empty geometry * Added tests on empty linestring, and changed the Lucene push-down to throw an exception. In Lucene, only a LineString can be empty, but in Elasticsearch even that is not allowed and results in parsing errors. So we can never reach this code in the Lucene push-down, and we now throw an error there instead. The tests now assert on the warnings. Note that the predicate fails for DISJOINT and INTERSECTS alike, because the parsing error results in null, the function returns null, the predicate interprets null as false, and no documents match. This null-in-null-out rule means that DISJOINT and INTERSECTS give the same answer on invalid geometries.
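To make the null-in-null-out rule concrete, here is a minimal, self-contained Java sketch of the semantics described above. It uses the JTS library, and the helper names (`parseOrNull`, `stDisjoint`) are hypothetical; this illustrates the rule and is not the actual ESQL evaluator code.

[source,java]
----
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;

public class NullInNullOutSketch {

    // Hypothetical helper: a geometry that fails to parse becomes null,
    // mirroring how ESQL records a warning and treats the value as null.
    static Geometry parseOrNull(WKTReader reader, String wkt) {
        try {
            return reader.read(wkt);
        } catch (ParseException | IllegalArgumentException e) {
            return null;
        }
    }

    // Hypothetical three-valued predicate: null in, null out.
    static Boolean stDisjoint(Geometry a, Geometry b) {
        if (a == null || b == null) {
            return null;
        }
        return a.disjoint(b);
    }

    public static void main(String[] args) {
        WKTReader reader = new WKTReader();
        Geometry point = parseOrNull(reader, "POINT(0 0)");
        // "LINESTRING()" is not valid WKT, so this becomes null.
        Geometry invalid = parseOrNull(reader, "LINESTRING()");
        Boolean disjoint = stDisjoint(point, invalid); // null, neither true nor false
        // A WHERE clause keeps a row only when the predicate is literally true,
        // so a null result matches no documents, for DISJOINT and INTERSECTS alike.
        System.out.println("row matches: " + Boolean.TRUE.equals(disjoint));
    }
}
----

Under these assumptions, replacing `stDisjoint` with an analogous `stIntersects` yields the same empty match set on invalid input, which is exactly what the csv-spec warning tests below assert.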
--- docs/changelog/107007.yaml | 5 + .../description/st_contains.asciidoc | 2 - .../description/st_disjoint.asciidoc | 5 + .../description/st_intersects.asciidoc | 2 - .../functions/description/st_within.asciidoc | 2 - .../functions/examples/st_disjoint.asciidoc | 13 + .../functions/layout/st_disjoint.asciidoc | 15 ++ .../functions/parameters/st_disjoint.asciidoc | 9 + .../esql/functions/signature/st_disjoint.svg | 1 + .../esql/functions/spatial-functions.asciidoc | 2 + .../esql/functions/st_contains.asciidoc | 2 +- .../esql/functions/st_disjoint.asciidoc | 27 ++ .../esql/functions/st_intersects.asciidoc | 1 + .../esql/functions/st_within.asciidoc | 2 +- .../esql/functions/types/st_disjoint.asciidoc | 16 ++ .../cartesian_multipolygons.csv-spec | 68 +++++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/spatial.csv-spec | 227 +++++++++++++++++ .../main/resources/spatial_shapes.csv-spec | 52 ++++ ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++++++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 +++++++++++ ...ntCartesianSourceAndConstantEvaluator.java | 132 ++++++++++ ...ointCartesianSourceAndSourceEvaluator.java | 152 +++++++++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++++++++ ...ntGeoPointDocValuesAndSourceEvaluator.java | 151 +++++++++++ ...DisjointGeoSourceAndConstantEvaluator.java | 132 ++++++++++ ...alDisjointGeoSourceAndSourceEvaluator.java | 152 +++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../scalar/spatial/SpatialContains.java | 2 - .../scalar/spatial/SpatialDisjoint.java | 239 ++++++++++++++++++ .../scalar/spatial/SpatialIntersects.java | 2 - .../spatial/SpatialRelatesFunction.java | 6 +- .../scalar/spatial/SpatialWithin.java | 2 - .../xpack/esql/io/stream/PlanNamedTypes.java | 6 + .../xpack/esql/plugin/EsqlFeatures.java | 8 +- .../querydsl/query/SpatialRelatesQuery.java | 39 ++- .../scalar/spatial/SpatialDisjointTests.java | 46 ++++ .../optimizer/PhysicalPlanOptimizerTests.java | 13 + 38 files changed, 1912 insertions(+), 27 deletions(-) create mode 100644 docs/changelog/107007.yaml create mode 100644 docs/reference/esql/functions/description/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_disjoint.svg create mode 100644 docs/reference/esql/functions/st_disjoint.asciidoc create mode 100644 docs/reference/esql/functions/types/st_disjoint.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java 
create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java diff --git a/docs/changelog/107007.yaml b/docs/changelog/107007.yaml new file mode 100644 index 0000000000000..b2a755171725b --- /dev/null +++ b/docs/changelog/107007.yaml @@ -0,0 +1,5 @@ +pr: 107007 +summary: "ESQL: Support ST_DISJOINT" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index ed79fe3d9c1f3..678fde7f5d98b 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry contains the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc new file mode 100644 index 0000000000000..95ab02a39614a --- /dev/null +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the two geometries or geometry columns are disjoint. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index 3a36d79cbd123..b736ba29a6c8b 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the two geometries or geometry columns intersect. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index be52db3f694bf..890f28cb769b0 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry is within the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
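The description files above, and the ST_DISJOINT docs that follow, define the function as the inverse of ST_INTERSECTS. Below is a short sketch of that identity, again a JTS-based illustration under the same assumptions as the earlier sketch rather than the real evaluators, using the same polygon and points that the csv-spec tests in this patch use:

[source,java]
----
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.WKTReader;

public class DisjointInverseCheck {
    public static void main(String[] args) throws Exception {
        WKTReader reader = new WKTReader();
        Geometry polygon = reader.read("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))");
        // POINT(1 1) is a vertex of the polygon (so it intersects);
        // POINT(-1 -1) lies outside it (so it is disjoint).
        for (String wkt : new String[] { "POINT(1 1)", "POINT(-1 -1)" }) {
            Geometry point = reader.read(wkt);
            boolean disjoint = polygon.disjoint(point);
            // ST_Disjoint(A, B) <=> A ∩ B = ∅ <=> NOT ST_Intersects(A, B)
            assert disjoint == (polygon.intersects(point) == false);
            System.out.println(wkt + " -> disjoint: " + disjoint);
        }
    }
}
----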
diff --git a/docs/reference/esql/functions/examples/st_disjoint.asciidoc b/docs/reference/esql/functions/examples/st_disjoint.asciidoc new file mode 100644 index 0000000000000..192553e528a24 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_disjoint.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/layout/st_disjoint.asciidoc b/docs/reference/esql/functions/layout/st_disjoint.asciidoc new file mode 100644 index 0000000000000..a1eef41006f3e --- /dev/null +++ b/docs/reference/esql/functions/layout/st_disjoint.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +include::../parameters/st_disjoint.asciidoc[] +include::../description/st_disjoint.asciidoc[] +include::../types/st_disjoint.asciidoc[] +include::../examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc new file mode 100644 index 0000000000000..e87a0d0eb94f0 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/signature/st_disjoint.svg b/docs/reference/esql/functions/signature/st_disjoint.svg new file mode 100644 index 0000000000000..becd0be37e441 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_disjoint.svg @@ -0,0 +1 @@ +ST_DISJOINT(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index 739d6b2d6f58f..b6d178ddd624d 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -9,6 +9,7 @@ // tag::spatial_list[] * experimental:[] <> +* experimental:[] <> * experimental:[] <> * experimental:[] <> * experimental:[] <> @@ -16,6 +17,7 @@ // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_disjoint.asciidoc[] include::st_contains.asciidoc[] include::st_within.asciidoc[] include::st_x.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc index 07b1a11aa7247..110c4fe4ca9ec 100644 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_contains.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. 
include::types/st_contains.asciidoc[] include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc new file mode 100644 index 0000000000000..db89ca186a0ff --- /dev/null +++ b/docs/reference/esql/functions/st_disjoint.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +include::description/st_disjoint.asciidoc[] +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ + +include::types/st_disjoint.asciidoc[] +include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc index fbe313d10b0e7..d75a7f3a50e0f 100644 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -24,6 +24,7 @@ This means it is not possible to combine `geo_*` and `cartesian_*` parameters. Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). +This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ include::types/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc index 64adb91219c4a..0f0190a9de638 100644 --- a/docs/reference/esql/functions/st_within.asciidoc +++ b/docs/reference/esql/functions/st_within.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_within.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. include::types/st_within.asciidoc[] include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/types/st_disjoint.asciidoc b/docs/reference/esql/functions/types/st_disjoint.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_disjoint.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index a2411cfd7a335..aa6529c2d4319 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -52,6 +52,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSinglePolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5) +; + #################################################################################################### # Test against a polygon smaller in size to the Bottom Left polygon @@ -99,6 +122,29 @@ id:l | name:keyword | shape:cartesian_shape 16 | Bottom left point | POINT(0.5 0.5) ; +whereDisjointSmallerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +12 | Bottom right diagonal | LINESTRING(2 0, 3 1) +13 | Top right diagonal | LINESTRING(2 2, 3 3) +14 | Top left diagonal | LINESTRING(0 2, 1 3) +17 | Bottom right point | POINT(2.5 0.5) +18 | Top right point | POINT(2.5 2.5) +19 | Top left point | POINT(0.5 2.5); +; + #################################################################################################### # Test against a polygon similar in size to the entire test data @@ -175,6 +221,17 @@ id:l | name:keyword | shape:cartesian_shape 19 | Top left point | POINT(0.5 2.5) ; +whereDisjointLargerPolygon 
+required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + #################################################################################################### # Test against a polygon larger than all test data @@ -250,3 +307,14 @@ id:l | name:keyword | shape:cartesian_shape 18 | Top right point | POINT(2.5 2.5) 19 | Top left point | POINT(0.5 2.5) ; + +whereDisjointEvenLargerPolygon +required_feature: esql.st_disjoint + +FROM cartesian_multipolygons +| WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 5af5d9d3417de..33b61c95ed0ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -66,6 +66,7 @@ double pi() "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" "boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" @@ -175,6 +176,7 @@ split |[string, delim] |["keyword|text", "keyword|te sqrt |number |"double|integer|long|unsigned_long" |[""] st_centroid |field |"geo_point|cartesian_point" |[""] st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_x |point |"geo_point|cartesian_point" |[""] @@ -285,6 +287,7 @@ split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. st_centroid |The centroid of a spatial field. st_contains |Returns whether the first geometry contains the second geometry. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. st_intersects |Returns whether the two geometries or geometry columns intersect. 
st_within |Returns whether the first geometry is within the second geometry. st_x |Extracts the x-coordinate from a point geometry. @@ -396,6 +399,7 @@ split |keyword sqrt |double |false |false |false st_centroid |"geo_point|cartesian_point" |false |false |true st_contains |boolean |[false, false] |false |false +st_disjoint |boolean |[false, false] |false |false st_intersects |boolean |[false, false] |false |false st_within |boolean |[false, false] |false |false st_x |double |false |false |false @@ -451,5 +455,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -101 | 101 | 101 +102 | 102 | 102 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index c1421d91dffa5..843b2674967fe 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -605,6 +605,91 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; +############################################### +# Tests for ST_DISJOINT on GEO_POINT type + +literalPolygonDisjointLiteralPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +pointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports +| WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 100), y = FLOOR(y / 100) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 94 +-1 | -1 | 67 +-1 | 0 | 201 +0 | 0 | 15 +1 | -1 | 33 +1 | 0 | 53 +; + +airportCityLocationPointDisjointCentroid +required_feature: esql.st_disjoint + +FROM airports_mp +| WHERE ST_DISJOINT(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (67.8581917192787 24.02956652920693) | POINT (67.81638333333332 24.048999999999996) | 6 +; + ############################################### # Tests for ST_CONTAINS on GEO_POINT type @@ -1167,6 +1252,148 @@ centroid:cartesian_point | count:long POINT (4783520.5 1661010.0) | 1 ; 
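+// The cartesian_point ST_DISJOINT tests below mirror the geo_point tests above. Note that the
+// intersects count (444) and the disjoint count (405) together cover every airports_web document,
+// as the grouped intersects/disjoint test below confirms: with valid geometries, each point either
+// intersects the query polygon or is disjoint from it.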
+cartesianPointIntersectsLiteralPolygonCount +required_feature: esql.st_intersects + +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +444 +; + +############################################### +# Tests for ST_DISJOINT on CARTESIAN_POINT type + +literalPolygonDisjointLiteralCartesianPoint +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalCartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_DISJOINT(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(-1 -1)" | POINT(-1 -1) +"POINT(-1 1)" | POINT(-1 1) +; + +literalPolygonDisjointLiteralCartesianPointOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_CARTESIANPOINT("POINT(0 0)")) +; + +disjoint:boolean +false +; + +literalCartesianPointDisjointLiteralPolygonOneRow +required_feature: esql.st_disjoint + +ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +disjoint:boolean +true +; + +cartesianPointDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() +; + +count:long +405 +; + +cartesianPointIntersectsDisjointLiteralPolygonCount +required_feature: esql.st_disjoint + +FROM airports_web +| EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL disjoint = ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| STATS count=COUNT() BY intersects, disjoint +| SORT intersects DESC, disjoint DESC +| KEEP intersects, disjoint, count +; + +intersects:boolean | disjoint:boolean | count:long +true | false | 444 +false | true | 405 +; + +cartesianPointDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) +| EVAL x = ST_X(location), y = ST_Y(location) +| EVAL x = FLOOR(x / 10000000), y = FLOOR(y / 10000000) +| STATS count=COUNT() BY x, y +| KEEP x, y, count +| SORT x ASC, y ASC +; + +x:double | y:double | count:long +-2 | -1 | 8 +-2 | 0 | 136 +-2 | 1 | 3 +-1 | -1 | 64 +-1 | 0 | 192 +-1 | 1 | 2 +; + +cartesianPointDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + +cartesianPointDisjointInvalidGeometry +required_feature: esql.st_disjoint + +FROM airports_web +| WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) +| STATS count=COUNT() +; + +warning:Line 2:31: evaluation of [TO_CARTESIANSHAPE(\"Invalid Geometry\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:31: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: invalid + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS on CARTESIAN_POINT type diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index f010ed13370e0..6d0d15c398986 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -213,6 +213,27 @@ intersects:boolean true ; +############################################### +# Tests for ST_DISJOINT with GEO_SHAPE + +polygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +// tag::st_disjoint-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_disjoint-airport_city_boundaries[] +| SORT abbrev +| LIMIT 1 +; + +// tag::st_disjoint-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco de Juárez | POINT (-99.8825 16.8636) +// end::st_disjoint-airport_city_boundaries-result[] +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE @@ -422,6 +443,37 @@ wkt:keyword | shape:ca "POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) ; +############################################### +# Tests for ST_DISJOINT with CARTESIAN_SHAPE + +cartesianPolygonDisjointLiteralPolygon +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +| LIMIT 1 +; + +id:keyword | name:keyword | shape:cartesian_shape +ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -1760356.671722378, -2561396.0054164226) +; + +cartesianPolygonDisjointEmptyGeometry +required_feature: esql.st_disjoint + +FROM countries_bbox_web +| WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) +| STATS count=COUNT() +; + +warning:Line 2:28: evaluation of [TO_CARTESIANSHAPE(\"LINESTRING()\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:28: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +count:long +0 +; + ############################################### # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..62b5761cfd655 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..4f11da3c474a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialDisjoint.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory 
leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..adb5a33b83f3b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
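+ * <p> + * In the {@code *AndConstant} variants the right-hand geometry arrives as a pre-built Lucene {@code Component2D} held by the factory rather than as a per-row evaluator, so only {@code leftValue} is evaluated per position and {@code close()} releases {@code leftValue} alone.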
+ */ +public final class SpatialDisjointCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndConstantEvaluator[" + "leftValue=" + 
leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..186a1299a4a98 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return 
"SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..675b6cc58197e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..1b22e67d11b25 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + 
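+      // Both inputs are per-row evaluators in the *AndSource variants, so both are released here; the *AndConstant variants hold a constant Component2D on the right and release only leftValue.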
} + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..1df80cf90bd10 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialDisjointGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + 
} + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..9bdc60813ad67 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialDisjoint}. + * This class is generated. Do not edit it. + */ +public final class SpatialDisjointGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialDisjointGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if 
(leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialDisjoint.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialDisjointGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialDisjointGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialDisjointGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3db7ae3cac7b5..178c714950b05 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -81,6 +81,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; 
+import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; @@ -189,6 +190,7 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), def(SpatialContains.class, SpatialContains::new, "st_contains"), + def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), def(SpatialWithin.class, SpatialWithin::new, "st_within"), def(StX.class, StX::new, "st_x"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 2a4915f38fb48..279f31e34ac95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -112,8 +112,6 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry contains the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. " - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java new file mode 100644 index 0000000000000..7833f93b6270f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_DISJOINT. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_DISJOINT and QueryRelation.DISJOINT. 
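+ * <p> + * As a usage sketch (hypothetical values; the shipped example is the {@code st_disjoint-airport_city_boundaries} tag in the {@code spatial_shapes} CSV spec): {@code FROM airport_city_boundaries | WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((...))"))}.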
+ */ +public class SpatialDisjoint extends SpatialRelatesFunction { + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Disjoint") + ); + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.DISJOINT, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Disjoint") + ); + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the two geometries or geometry columns are disjoint.", + examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") + ) + public SpatialDisjoint( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + private SpatialDisjoint(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.DISJOINT; + } + + @Override + public SpatialDisjoint withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialDisjoint(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialDisjoint replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialDisjoint(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialDisjoint::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialDisjointGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialDisjointCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialDisjointCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef 
leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 93965b0d3e9be..810e3206ada73 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -66,8 +66,6 @@ public class SpatialIntersects extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial", tag = "st_intersects-airports") ) public SpatialIntersects( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index b18a3ba4926f4..51109aee29482 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -116,10 +116,14 @@ protected TypeResolution resolveType( if (resolution.unresolved()) { return resolution; } - crsType = SpatialCrsType.fromDataType(spatialExpression.dataType()); + setCrsType(spatialExpression.dataType()); return TypeResolution.TYPE_RESOLVED; } + protected void setCrsType(DataType dataType) { + crsType = SpatialCrsType.fromDataType(dataType); + } + public static TypeResolution isSameSpatialType( DataType spatialDataType, Expression expression, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index a5ade4cfeb73c..ca285ca07e27b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -67,8 +67,6 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, description = "Returns whether the first geometry is within the second geometry.", - note = "The second parameter must also have the same coordinate system as the first. 
" - + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 4640f1a7168c0..27e3c95bd123a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -103,6 +103,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -398,6 +399,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialDisjoint.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readDisjoint), of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readContains), of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), @@ -1504,6 +1506,10 @@ static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); } + static SpatialDisjoint readDisjoint(PlanStreamInput in) throws IOException { + return new SpatialDisjoint(Source.EMPTY, in.readExpression(), in.readExpression()); + } + static SpatialContains readContains(PlanStreamInput in) throws IOException { return new SpatialContains(Source.EMPTY, in.readExpression(), in.readExpression()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 29f0e04ef2b94..31c967fc3eee8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -80,6 +80,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within"); + /** + * Support for spatial aggregation {@code ST_DISJOINT}. Done in #107007. + */ + private static final NodeFeature ST_DISJOINT = new NodeFeature("esql.st_disjoint"); + /** * The introduction of the {@code VALUES} agg. 
*/ @@ -108,7 +113,8 @@ public Set getFeatures() { SPATIAL_SHAPES, ST_CENTROID, ST_INTERSECTS, - ST_CONTAINS_WITHIN + ST_CONTAINS_WITHIN, + ST_DISJOINT ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index a16c227f7f277..30cadb3e19dc8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -12,6 +12,8 @@ import org.apache.lucene.document.XYPointField; import org.apache.lucene.document.XYShape; import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -24,6 +26,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.GeoShapeQueryable; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -221,7 +224,16 @@ org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, M } /** - * This code is based on the ShapeQueryPointProcessor.shapeQuery() method + * This code is based on the ShapeQueryPointProcessor.shapeQuery() method, with additional support for two special cases: + *
+ * <ul>
+ *     <li>
+ *         DISJOINT queries (using {@code EXISTS && !INTERSECTS}, similar to {@code LegacyGeoShapeQueryProcessor.geoShapeQuery()})
+ *     </li>
+ *     <li>
+ *         CONTAINS queries (if the shape is a point, INTERSECTS is used, otherwise a MatchNoDocsQuery is built,
+ *         similar to {@code LatLonPoint.makeContainsGeometryQuery()})
+ *     </li>
+ * </ul>
    */ private static org.apache.lucene.search.Query pointShapeQuery( Geometry geometry, @@ -231,20 +243,28 @@ private static org.apache.lucene.search.Query pointShapeQuery( ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { - // A point field can never contain a non-point geometry - return new MatchNoDocsQuery(); + return new MatchNoDocsQuery("A point field can never contain a non-point geometry"); } final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); - org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + org.apache.lucene.search.Query intersects = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + if (relation == ShapeField.QueryRelation.DISJOINT) { + // XYPointField does not support DISJOINT queries, so we build one as EXISTS && !INTERSECTS + BooleanQuery.Builder bool = new BooleanQuery.Builder(); + org.apache.lucene.search.Query exists = ExistsQueryBuilder.newFilter(context, fieldName, false); + bool.add(exists, BooleanClause.Occur.MUST); + bool.add(intersects, BooleanClause.Occur.MUST_NOT); + return bool.build(); + } + + // Point-Intersects works for all cases except CONTAINS(shape) and DISJOINT, which are handled separately above if (hasDocValues) { final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); - query = new IndexOrDocValuesQuery(query, queryDocValues); + intersects = new IndexOrDocValuesQuery(intersects, queryDocValues); } - return query; + return intersects; } /** @@ -262,8 +282,7 @@ private static org.apache.lucene.search.Query shapeShapeQuery( throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); } if (geometry == null || geometry.isEmpty()) { - // Should never be null, but can be an empty geometry - return new MatchNoDocsQuery(); + throw new QueryShardException(context, "Invalid/empty geometry"); } final XYGeometry[] luceneGeometries; try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java new file mode 100644 index 0000000000000..6e62af7e964f9 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjointTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_disjoint") +public class SpatialDisjointTests extends SpatialRelatesFunctionTestCase { + public SpatialDisjointTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialDisjointTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialDisjoint(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 180a8ff16f4eb..fb2362851e43c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; @@ -2933,6 +2934,7 @@ private record TestSpatialRelation(ShapeRelation relation, TestDataSource index, String function() { return switch (relation) { case INTERSECTS -> "ST_INTERSECTS"; + case DISJOINT -> "ST_DISJOINT"; case WITHIN -> "ST_WITHIN"; case CONTAINS -> "ST_CONTAINS"; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2942,6 +2944,7 @@ String function() { Class functionClass() { return switch (relation) { case INTERSECTS -> SpatialIntersects.class; + case DISJOINT -> SpatialDisjoint.class; case WITHIN -> literalRight ? SpatialWithin.class : SpatialContains.class; case CONTAINS -> literalRight ? 
SpatialContains.class : SpatialWithin.class; default -> throw new IllegalArgumentException("Unsupported relation: " + relation); @@ -2975,12 +2978,16 @@ public void testPushDownSpatialRelatesStringToSource() { TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), @@ -3027,10 +3034,16 @@ public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() TestSpatialRelation[] tests = new TestSpatialRelation[] { new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airports, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.DISJOINT, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), From f0d445fea6826c7992488b31fa2e629ced0086db Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 8 Apr 2024 11:26:44 +0100 Subject: [PATCH 072/173] Further reduce usage of `SAME` threadpool name (#107096) Updates another couple of test suites that unnecessarily look up `DIRECT_EXECUTOR_SERVICE` from a `ThreadPool`. 
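For context, a "direct" executor simply runs each task on the calling thread, which is what the `SAME` pool name used to signal. A minimal standalone sketch of that behavior (plain `java.util.concurrent`; the class and constant names here are illustrative, not part of this change):

    import java.util.concurrent.Executor;

    public class DirectExecutorDemo {
        // A direct executor runs each task synchronously on the submitting thread,
        // which is the behavior EsExecutors.DIRECT_EXECUTOR_SERVICE provides.
        static final Executor DIRECT = Runnable::run;

        public static void main(String[] args) {
            DIRECT.execute(() -> System.out.println("ran on: " + Thread.currentThread().getName()));
            // Prints the caller's thread (e.g. "main"): there is no hand-off to a
            // pool, so no ThreadPool.Names.SAME lookup is needed at the call site.
        }
    }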
Relates #106279 --- .../index/shard/IndexShardTests.java | 23 ++++++----- .../TransportServiceLifecycleTests.java | 38 +++++++++++++------ 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 97bf9f4e380fa..c2706a7a3cf22 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -148,7 +148,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; @@ -401,19 +400,19 @@ public void testRunUnderPrimaryPermitDelaysToExecutorWhenBlocked() throws Except indexShard.acquireAllPrimaryOperationsPermits(onAcquired, new TimeValue(Long.MAX_VALUE, TimeUnit.NANOSECONDS)); final Releasable permit = onAcquired.actionGet(); final CountDownLatch latch = new CountDownLatch(1); - final String executorOnDelay = randomFrom( - ThreadPool.Names.FLUSH, - ThreadPool.Names.GENERIC, - ThreadPool.Names.MANAGEMENT, - ThreadPool.Names.SAME - ); + final String expectedThreadPoolName; + final Executor executorOnDelay; + if (randomBoolean()) { + expectedThreadPoolName = ThreadPool.Names.GENERIC; + executorOnDelay = EsExecutors.DIRECT_EXECUTOR_SERVICE; + } else { + expectedThreadPoolName = randomFrom(ThreadPool.Names.FLUSH, ThreadPool.Names.GENERIC, ThreadPool.Names.MANAGEMENT); + executorOnDelay = threadPool.executor(expectedThreadPoolName); + } indexShard.runUnderPrimaryPermit(() -> { - final String expectedThreadPoolName = executorOnDelay.equals(ThreadPool.Names.SAME) - ? 
"generic" - : executorOnDelay.toLowerCase(Locale.ROOT); - assertThat(Thread.currentThread().getName(), containsString(expectedThreadPoolName)); + assertThat(Thread.currentThread().getName(), containsString('[' + expectedThreadPoolName + ']')); latch.countDown(); - }, e -> fail(e.toString()), threadPool.executor(executorOnDelay)); + }, e -> fail(e.toString()), executorOnDelay); permit.close(); latch.await(); // we could race and assert on the count before the permit is returned diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java index 87fbf113fc1c9..062cc71c9172d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceLifecycleTests.java @@ -72,13 +72,13 @@ public void testHandlersCompleteAtShutdown() throws Exception { while (keepGoing.get() && requestPermits.tryAcquire()) { nodeB.transportService.sendRequest( randomFrom(random, nodeA, nodeB).transportService.getLocalNode(), - TestNode.ACTION_NAME_PREFIX + randomFrom(random, TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random), TransportRequest.Empty.INSTANCE, new TransportResponseHandler() { final AtomicBoolean completed = new AtomicBoolean(); - final String executor = randomFrom(random, TestNode.EXECUTOR_NAMES); + final Executor executor = nodeB.randomExecutor(); @Override public void handleResponse(TransportResponse.Empty response) { @@ -99,7 +99,7 @@ public TransportResponse.Empty read(StreamInput in) { @Override public Executor executor() { - return nodeB.transportService.getThreadPool().executor(executor); + return executor; } } ); @@ -130,7 +130,7 @@ public void testInternalSendExceptionForksToHandlerExecutor() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future, unusedReader(), deterministicTaskQueue::scheduleNow) @@ -149,7 +149,7 @@ public void testInternalSendExceptionForksToGenericIfHandlerDoesNotFork() { final var future = new PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -178,7 +178,7 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { try { nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -197,14 +197,14 @@ public void testInternalSendExceptionForcesExecutionOnHandlerExecutor() { public void testInternalSendExceptionCompletesHandlerOnCallingThreadIfTransportServiceClosed() { final var nodeA = new TestNode("node-A"); - final var executor = nodeA.threadPool.executor(randomFrom(TestNode.EXECUTOR_NAMES)); + final var executor = nodeA.randomExecutor(); nodeA.close(); final var testThread = Thread.currentThread(); final var future = new 
PlainActionFuture(); nodeA.transportService.sendRequest( nodeA.getThrowingConnection(), - TestNode.ACTION_NAME_PREFIX + randomFrom(TestNode.EXECUTOR_NAMES), + TestNode.randomActionName(random()), new TransportRequest.Empty(), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(future.delegateResponse((l, e) -> { @@ -229,6 +229,7 @@ private static E getSendRequestException(Future future, } private static class Executors { + static final String DIRECT = "direct"; static final String SCALING_DROP_ON_SHUTDOWN = "scaling-drop-on-shutdown"; static final String SCALING_REJECT_ON_SHUTDOWN = "scaling-reject-on-shutdown"; static final String FIXED_BOUNDED_QUEUE = "fixed-bounded-queue"; @@ -238,8 +239,9 @@ private static class Executors { private static class TestNode implements Releasable { static final String ACTION_NAME_PREFIX = "internal:test/"; + static final String[] EXECUTOR_NAMES = new String[] { - ThreadPool.Names.SAME, + Executors.DIRECT, Executors.SCALING_DROP_ON_SHUTDOWN, Executors.SCALING_REJECT_ON_SHUTDOWN, Executors.FIXED_BOUNDED_QUEUE, @@ -293,10 +295,10 @@ public ExecutorService executor(String name) { null, emptySet() ); - for (final var executor : EXECUTOR_NAMES) { + for (final var executorName : EXECUTOR_NAMES) { transportService.registerRequestHandler( - ACTION_NAME_PREFIX + executor, - threadPool.executor(executor), + ACTION_NAME_PREFIX + executorName, + getExecutor(executorName), TransportRequest.Empty::new, (request, channel, task) -> { if (randomBoolean()) { @@ -311,6 +313,18 @@ public ExecutorService executor(String name) { transportService.acceptIncomingRequests(); } + Executor getExecutor(String executorName) { + return executorName.equals(Executors.DIRECT) ? EsExecutors.DIRECT_EXECUTOR_SERVICE : threadPool.executor(executorName); + } + + Executor randomExecutor() { + return getExecutor(randomFrom(TestNode.EXECUTOR_NAMES)); + } + + static String randomActionName(Random random) { + return ACTION_NAME_PREFIX + randomFrom(random, EXECUTOR_NAMES); + } + @Override public void close() { transportService.stop(); From cbdd3cde96675e3f1bc4f55bb0fe63afc9ce6618 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 8 Apr 2024 12:32:25 +0200 Subject: [PATCH 073/173] Update FROM option preference (#107199) This updates the preference from an imperative `_shards` to a preferential `_local` to make the test more resilient to different sharding scenarios. --- .../esql/qa/testFixtures/src/main/resources/from.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index 11fb0ab532945..c2c0b82f1a664 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -133,7 +133,7 @@ convertFromDatetimeWithOptions required_feature: esql.from_options // tag::convertFromDatetimeWithOptions[] - FROM employees OPTIONS "allow_no_indices"="false","preference"="_shards:0" + FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" | SORT emp_no | EVAL hire_double = to_double(hire_date) | KEEP emp_no, hire_date, hire_double From 54ca10e13044bc8f5363b4b96b02d107ba3d9001 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Apr 2024 12:40:04 +0200 Subject: [PATCH 074/173] Delete unused response deserialization code (#107181) With the transport client gone, lots of these constructors have become unused. 
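As a minimal sketch of the pattern (a hypothetical `PingResponse`, not one of the classes touched below): once nothing reads the type off the wire, its `StreamInput` constructor has no callers and can be deleted, leaving a single constructor that assigns each field exactly once.

    // Hypothetical response type, for illustration only.
    public final class PingResponse {
        // Assigned by exactly one constructor now, so it can be made final.
        private final long took;

        public PingResponse(long took) {
            this.took = took;
        }

        // Dead code: with the transport client gone, nothing deserializes this
        // type from the wire, so a constructor like the following can be removed.
        // public PingResponse(StreamInput in) throws IOException {
        //     took = in.readVLong();
        // }

        public long took() {
            return took;
        }
    }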
Removing this dead code also allows making a lot of fields final as an added bonus. --- .../mustache/MultiSearchTemplateResponse.java | 21 --------- .../mustache/SearchTemplateResponse.java | 7 --- .../action/PainlessContextAction.java | 6 --- .../action/PainlessExecuteAction.java | 2 +- .../index/rankeval/RankEvalResponse.java | 6 +-- .../ClusterAllocationExplainResponse.java | 2 +- .../cluster/remote/RemoteInfoResponse.java | 6 --- .../cluster/state/ClusterStateResponse.java | 6 +-- .../GetStoredScriptResponse.java | 4 +- .../action/get/MultiGetItemResponse.java | 11 ----- .../action/get/MultiGetResponse.java | 5 --- .../ingest/SimulatePipelineResponse.java | 4 +- .../action/search/ClearScrollResponse.java | 7 --- .../search/ClosePointInTimeResponse.java | 7 --- .../search/OpenPointInTimeResponse.java | 6 --- .../TransportReplicationAction.java | 4 +- .../support/tasks/BaseTasksResponse.java | 4 +- .../termvectors/MultiTermVectorsResponse.java | 5 --- .../index/reindex/BulkByScrollResponse.java | 8 ++-- .../persistent/PersistentTaskResponse.java | 2 +- .../license/GetBasicStatusResponse.java | 2 +- .../license/GetTrialStatusResponse.java | 2 +- .../license/PostStartTrialResponse.java | 6 +-- .../protocol/xpack/XPackInfoResponse.java | 6 +-- .../xpack/graph/GraphExploreResponse.java | 37 ---------------- .../xpack/watcher/DeleteWatchResponse.java | 8 ---- .../xpack/watcher/PutWatchResponse.java | 43 +++---------------- .../core/action/XPackInfoFeatureResponse.java | 8 +--- .../action/XPackUsageFeatureResponse.java | 2 +- .../core/ilm/ExplainLifecycleResponse.java | 2 +- .../core/ilm/action/GetLifecycleAction.java | 2 +- .../core/ilm/action/GetStatusAction.java | 2 +- .../RemoveIndexLifecyclePolicyAction.java | 2 +- .../ml/action/EvaluateDataFrameAction.java | 4 +- .../xpack/core/ml/action/MlInfoAction.java | 7 +-- .../ml/action/PostCalendarEventsAction.java | 7 +-- .../core/ml/action/PreviewDatafeedAction.java | 5 --- .../core/ml/action/PutCalendarAction.java | 7 +-- .../action/PutDataFrameAnalyticsAction.java | 4 +- .../core/ml/action/PutDatafeedAction.java | 2 +- .../xpack/core/ml/action/PutFilterAction.java | 4 +- .../DelegatePkiAuthenticationResponse.java | 6 +-- .../action/apikey/UpdateApiKeyResponse.java | 6 --- .../OpenIdConnectAuthenticateResponse.java | 22 +++------- .../oidc/OpenIdConnectLogoutResponse.java | 8 +--- ...dConnectPrepareAuthenticationResponse.java | 19 ++------ .../privilege/GetPrivilegesResponse.java | 2 +- .../profile/ActivateProfileResponse.java | 6 --- .../action/profile/GetProfilesResponse.java | 7 --- .../profile/SuggestProfilesResponse.java | 7 --- .../security/action/role/PutRoleResponse.java | 8 +--- .../DeleteRoleMappingResponse.java | 6 --- .../rolemapping/GetRoleMappingsResponse.java | 12 +----- .../rolemapping/PutRoleMappingResponse.java | 8 +--- .../action/saml/SamlAuthenticateResponse.java | 27 +++--------- .../saml/SamlInvalidateSessionResponse.java | 14 ++---- .../action/saml/SamlLogoutResponse.java | 7 --- .../SamlPrepareAuthenticationResponse.java | 12 ++---- .../action/saml/SamlSpMetadataResponse.java | 8 +--- .../DeleteServiceAccountTokenResponse.java | 2 +- .../action/token/CreateTokenResponse.java | 12 +++--- .../action/user/DeleteUserResponse.java | 8 +--- .../action/user/GetUsersResponse.java | 26 ----------- .../action/user/HasPrivilegesResponse.java | 10 ++--- .../user/ProfileHasPrivilegesResponse.java | 2 +- .../security/action/user/PutUserResponse.java | 8 +--- .../ssl/action/GetCertificateInfoAction.java | 12 +----- 
.../actions/ack/AckWatchResponse.java | 8 +--- .../activate/ActivateWatchResponse.java | 8 +--- .../actions/execute/ExecuteWatchResponse.java | 7 --- .../actions/get/GetWatchResponse.java | 34 +++------------ .../rules/action/GetQueryRulesetAction.java | 1 - .../xpack/fleet/action/GetSecretResponse.java | 2 +- .../SamlInitiateSingleSignOnResponse.java | 20 +++------ .../idp/action/SamlMetadataResponse.java | 6 --- .../SamlValidateAuthnRequestResponse.java | 9 ---- .../sql/action/SqlTranslateResponse.java | 2 +- 77 files changed, 108 insertions(+), 539 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index b867fcfb905ea..11d060002955a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.AbstractRefCounted; @@ -38,16 +37,6 @@ public static class Item implements Writeable { private final SearchTemplateResponse response; private final Exception exception; - private Item(StreamInput in) throws IOException { - if (in.readBoolean()) { - this.response = new SearchTemplateResponse(in); - this.exception = null; - } else { - exception = in.readException(); - this.response = null; - } - } - public Item(SearchTemplateResponse response, Exception exception) { this.response = response; this.exception = exception; @@ -114,16 +103,6 @@ protected void closeInternal() { } }); - MultiSearchTemplateResponse(StreamInput in) throws IOException { - super(in); - items = in.readArray(Item::new, Item[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_0_0)) { - tookInMillis = in.readVLong(); - } else { - tookInMillis = -1L; - } - } - MultiSearchTemplateResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 39da4066a7859..2e62f6e9c96f4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.AbstractRefCounted; @@ -46,12 +45,6 @@ protected void closeInternal() { SearchTemplateResponse() {} - SearchTemplateResponse(StreamInput in) throws IOException { - super(in); - source = in.readOptionalBytesReference(); - response = in.readOptionalWriteable(SearchResponse::new); - } - public BytesReference getSource() { return source; 
} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index 6d88ff1e8db6a..3c183830afa6d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -108,12 +108,6 @@ public Response(List scriptContextNames, PainlessContextInfo painlessCon this.painlessContextInfo = painlessContextInfo; } - public Response(StreamInput in) throws IOException { - super(in); - scriptContextNames = in.readStringCollectionAsList(); - painlessContextInfo = in.readOptionalWriteable(PainlessContextInfo::new); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(scriptContextNames); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 7f5f1fe4f84ea..6ab5fc724c711 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -462,7 +462,7 @@ static boolean needDocumentAndIndex(ScriptContext scriptContext) { public static class Response extends ActionResponse implements ToXContentObject { - private Object result; + private final Object result; Response(Object result) { this.result = result; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 061d8292b3e5f..fe6da7fe1ce68 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -30,11 +30,11 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject { /** The overall evaluation result. 
*/ - private double metricScore; + private final double metricScore; /** details about individual ranking evaluation queries, keyed by their id */ - private Map details; + private final Map details; /** exceptions for specific ranking evaluation queries, keyed by their id */ - private Map failures; + private final Map failures; public RankEvalResponse(double metricScore, Map partialResults, Map failures) { this.metricScore = metricScore; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java index 39baf25f5dada..92413fc104be4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java @@ -22,7 +22,7 @@ */ public class ClusterAllocationExplainResponse extends ActionResponse implements ChunkedToXContentObject { - private ClusterAllocationExplanation cae; + private final ClusterAllocationExplanation cae; public ClusterAllocationExplainResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java index 89e1ede46d9a8..7a19e7b277a08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.remote; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.transport.RemoteConnectionInfo; import org.elasticsearch.xcontent.ToXContentObject; @@ -23,11 +22,6 @@ public final class RemoteInfoResponse extends ActionResponse implements ToXConte private final List infos; - RemoteInfoResponse(StreamInput in) throws IOException { - super(in); - infos = in.readCollectionAsImmutableList(RemoteConnectionInfo::new); - } - public RemoteInfoResponse(Collection infos) { this.infos = List.copyOf(infos); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java index f4cc4e2f8f5d2..fca9121a3a858 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateResponse.java @@ -23,9 +23,9 @@ */ public class ClusterStateResponse extends ActionResponse { - private ClusterName clusterName; - private ClusterState clusterState; - private boolean waitForTimedOut = false; + private final ClusterName clusterName; + private final ClusterState clusterState; + private final boolean waitForTimedOut; public ClusterStateResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 24604a3977096..b33a718daee8a 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -26,8 +26,8 @@ public class GetStoredScriptResponse extends ActionResponse implements ToXConten public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); public static final ParseField SCRIPT = new ParseField("script"); - private String id; - private StoredScriptSource source; + private final String id; + private final StoredScriptSource source; public GetStoredScriptResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java index 9e755cfd7f081..b10471e75fcf8 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetItemResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.get; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,16 +26,6 @@ public MultiGetItemResponse(GetResponse response, MultiGetResponse.Failure failu this.failure = failure; } - MultiGetItemResponse(StreamInput in) throws IOException { - if (in.readBoolean()) { - failure = new MultiGetResponse.Failure(in); - response = null; - } else { - response = new GetResponse(in); - failure = null; - } - } - /** * The index name of the document. */ diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java index 4f548e227dcfb..3306ac874243c 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetResponse.java @@ -109,11 +109,6 @@ public MultiGetResponse(MultiGetItemResponse[] responses) { this.responses = responses; } - MultiGetResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiGetItemResponse::new, MultiGetItemResponse[]::new); - } - public MultiGetItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index 396a5b63b3cd5..e7ad434e3ad7c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -20,9 +20,9 @@ import java.util.List; public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { - private String pipelineId; + private final String pipelineId; private boolean verbose; - private List results; + private final List results; public SimulatePipelineResponse(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 3d00d18565756..de0d7b98ef851 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ParseField; @@ -34,12 +33,6 @@ public ClearScrollResponse(boolean succeeded, int numFreed) { this.numFreed = numFreed; } - public ClearScrollResponse(StreamInput in) throws IOException { - super(in); - succeeded = in.readBoolean(); - numFreed = in.readVInt(); - } - /** * @return Whether the attempt to clear a scroll was successful. */ diff --git a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java index d8cbfa53ee8ca..09cd96289416f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClosePointInTimeResponse.java @@ -8,11 +8,8 @@ package org.elasticsearch.action.search; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; -import java.io.IOException; - import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; @@ -21,10 +18,6 @@ public ClosePointInTimeResponse(boolean succeeded, int numFreed) { super(succeeded, numFreed); } - public ClosePointInTimeResponse(StreamInput in) throws IOException { - super(in); - } - @Override public RestStatus status() { if (isSucceeded() || getNumFreed() > 0) { diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java index 82cb158a0c59a..fd565ad4878bf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeResponse.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -24,11 +23,6 @@ public OpenPointInTimeResponse(String pointInTimeId) { this.pointInTimeId = Objects.requireNonNull(pointInTimeId, "Point in time parameter must be not null"); } - public OpenPointInTimeResponse(StreamInput in) throws IOException { - super(in); - pointInTimeId = in.readString(); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(pointInTimeId); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d7ff0359bfd27..ac5b004886319 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1183,8 +1183,8 @@ public PendingReplicationActions getPendingReplicationActions() { } public static class ReplicaResponse extends ActionResponse implements ReplicationOperation.ReplicaResponse { - private long localCheckpoint; - private long globalCheckpoint; + private final long localCheckpoint; + private final long globalCheckpoint; ReplicaResponse(StreamInput in) throws IOException { super(in); diff --git 
a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index 3e8290ad4fb4a..0aa565c87b4cd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -35,8 +35,8 @@ public class BaseTasksResponse extends ActionResponse { public static final String TASK_FAILURES = "task_failures"; public static final String NODE_FAILURES = "node_failures"; - private List taskFailures; - private List nodeFailures; + private final List taskFailures; + private final List nodeFailures; public BaseTasksResponse(List taskFailures, List nodeFailures) { this.taskFailures = taskFailures == null ? Collections.emptyList() : List.copyOf(taskFailures); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java index b631d30cfd8bb..5789c4910db09 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java @@ -91,11 +91,6 @@ public MultiTermVectorsResponse(MultiTermVectorsItemResponse[] responses) { this.responses = responses; } - public MultiTermVectorsResponse(StreamInput in) throws IOException { - super(in); - responses = in.readArray(MultiTermVectorsItemResponse::new, MultiTermVectorsItemResponse[]::new); - } - public MultiTermVectorsItemResponse[] getResponses() { return this.responses; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java index 5bdeac75989a8..87173aceaa059 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java @@ -30,10 +30,10 @@ * Response used for actions that index many documents using a scroll request. 
*/ public class BulkByScrollResponse extends ActionResponse implements ToXContentFragment { - private TimeValue took; - private BulkByScrollTask.Status status; - private List bulkFailures; - private List searchFailures; + private final TimeValue took; + private final BulkByScrollTask.Status status; + private final List bulkFailures; + private final List searchFailures; private boolean timedOut; static final String TOOK_FIELD = "took"; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java index 3560f3f28076d..fca6a9b2dde7d 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskResponse.java @@ -19,7 +19,7 @@ * Response upon a successful start or an persistent task */ public class PersistentTaskResponse extends ActionResponse { - private PersistentTask task; + private final PersistentTask task; public PersistentTaskResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java index 58e7be10cfa81..b0a162eb3ec14 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusResponse.java @@ -17,7 +17,7 @@ public class GetBasicStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartBasic; + private final boolean eligibleToStartBasic; GetBasicStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java index 9126d22f33250..5bceab8edbe92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java @@ -17,7 +17,7 @@ public class GetTrialStatusResponse extends ActionResponse implements ToXContentObject { - private boolean eligibleToStartTrial; + private final boolean eligibleToStartTrial; GetTrialStatusResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java index 853c3d39e4121..87b49f3ef9e82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java @@ -48,9 +48,9 @@ RestStatus getRestStatus() { } - private Status status; - private Map acknowledgeMessages; - private String acknowledgeMessage; + private final Status status; + private final Map acknowledgeMessages; + private final String acknowledgeMessage; PostStartTrialResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 34126064997d6..5ba0e584d63bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -38,11 +38,11 @@ public class XPackInfoResponse extends ActionResponse implements ToXContentObjec // TODO move this constant to License.java once we move License.java to the protocol jar @Nullable - private BuildInfo buildInfo; + private final BuildInfo buildInfo; @Nullable - private LicenseInfo licenseInfo; + private final LicenseInfo licenseInfo; @Nullable - private FeatureSetsInfo featureSetsInfo; + private final FeatureSetsInfo featureSetsInfo; public XPackInfoResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java index 5bf5ecb445c57..ce872b1e406ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; @@ -23,8 +22,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; - /** * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects * (nodes and edges in common graph parlance). 
@@ -43,40 +40,6 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb public GraphExploreResponse() {} - public GraphExploreResponse(StreamInput in) throws IOException { - super(in); - tookInMillis = in.readVLong(); - timedOut = in.readBoolean(); - - int size = in.readVInt(); - if (size == 0) { - shardFailures = ShardSearchFailure.EMPTY_ARRAY; - } else { - shardFailures = new ShardSearchFailure[size]; - for (int i = 0; i < shardFailures.length; i++) { - shardFailures[i] = readShardSearchFailure(in); - } - } - // read vertices - size = in.readVInt(); - vertices = new HashMap<>(); - for (int i = 0; i < size; i++) { - Vertex n = Vertex.readFrom(in); - vertices.put(n.getId(), n); - } - - size = in.readVInt(); - - connections = new HashMap<>(); - for (int i = 0; i < size; i++) { - Connection e = new Connection(in, vertices); - connections.put(e.getId(), e); - } - - returnDetailedInfo = in.readBoolean(); - - } - public GraphExploreResponse( long tookInMillis, boolean timedOut, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java index c6d673aec7d2a..ffeb0867723e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,13 +28,6 @@ public DeleteWatchResponse(String id, long version, boolean found) { this.found = found; } - public DeleteWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - found = in.readBoolean(); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java index 5c1f53bef3ef0..3bc3ebbd6f6f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -7,9 +7,7 @@ package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -18,22 +16,11 @@ public class PutWatchResponse extends ActionResponse implements ToXContentObject { - private String id; - private long version; - private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - private long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; - private boolean created; - - public PutWatchResponse() {} - - public PutWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - version = in.readVLong(); - seqNo = in.readZLong(); - primaryTerm = in.readVLong(); - created = in.readBoolean(); - } + private final String id; + private 
final long version; + private final long seqNo; + private final long primaryTerm; + private final boolean created; public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, boolean created) { this.id = id; @@ -43,26 +30,6 @@ public PutWatchResponse(String id, long version, long seqNo, long primaryTerm, b this.created = created; } - private void setId(String id) { - this.id = id; - } - - private void setVersion(long version) { - this.version = version; - } - - private void setSeqNo(long seqNo) { - this.seqNo = seqNo; - } - - private void setPrimaryTerm(long primaryTerm) { - this.primaryTerm = primaryTerm; - } - - private void setCreated(boolean created) { - this.created = created; - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java index 5d066a4dc6c50..66037054da685 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; @@ -15,12 +14,7 @@ public class XPackInfoFeatureResponse extends ActionResponse { - private FeatureSet info; - - public XPackInfoFeatureResponse(StreamInput in) throws IOException { - super(in); - info = new FeatureSet(in); - } + private final FeatureSet info; public XPackInfoFeatureResponse(FeatureSet info) { this.info = info; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java index 33dec1371dc86..71bb9993f3a29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureResponse.java @@ -15,7 +15,7 @@ public class XPackUsageFeatureResponse extends ActionResponse { - private XPackFeatureSet.Usage usage; + private final XPackFeatureSet.Usage usage; public XPackUsageFeatureResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java index 97c7d6d8cb60d..755851b2ec88c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java @@ -30,7 +30,7 @@ public class ExplainLifecycleResponse extends ActionResponse implements ToXConte public static final ParseField INDICES_FIELD = new ParseField("indices"); - private Map indexResponses; + private final Map indexResponses; public ExplainLifecycleResponse(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java 
index 97d1fbf524963..d40220db794b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -40,7 +40,7 @@ protected GetLifecycleAction() { public static class Response extends ActionResponse implements ChunkedToXContentObject { - private List policies; + private final List policies; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java index f70510de382a9..c3022adb2f60a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetStatusAction.java @@ -29,7 +29,7 @@ protected GetStatusAction() { public static class Response extends ActionResponse implements ToXContentObject { - private OperationMode mode; + private final OperationMode mode; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java index 68537fba3bfd1..e1171d9ab7dd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java @@ -47,7 +47,7 @@ public static class Response extends ActionResponse implements ToXContentObject PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), HAS_FAILURES_FIELD); } - private List failedIndexes; + private final List failedIndexes; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java index 298b6e71fc855..9a350c3c68adb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java @@ -188,8 +188,8 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, public static class Response extends ActionResponse implements ToXContentObject { - private String evaluationName; - private List metrics; + private final String evaluationName; + private final List metrics; public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 1f1eb69ce606c..94bb7047bfe23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -47,7 +47,7 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse implements ToXContentObject { - private Map info; + private final Map info; public Response(Map info) { this.info 
= info; @@ -57,11 +57,6 @@ public Response() { this.info = Collections.emptyMap(); } - public Response(StreamInput in) throws IOException { - super(in); - info = in.readGenericMap(); - } - public Map getInfo() { return info; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java index 6ca201fd8034a..d58b699fb6555 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -120,12 +120,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private List scheduledEvents; - - public Response(StreamInput in) throws IOException { - super(in); - in.readCollectionAsList(ScheduledEvent::new); - } + private final List scheduledEvents; public Response(List scheduledEvents) { this.scheduledEvents = scheduledEvents; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index d03a6d5c0c7c5..7988f885a27da 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -298,11 +298,6 @@ public static class Response extends ActionResponse implements ToXContentObject private final BytesReference preview; - public Response(StreamInput in) throws IOException { - super(in); - preview = in.readBytesReference(); - } - public Response(BytesReference preview) { this.preview = preview; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java index 755e610c4000c..eab89bb86022a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java @@ -117,12 +117,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private Calendar calendar; - - public Response(StreamInput in) throws IOException { - super(in); - calendar = new Calendar(in); - } + private final Calendar calendar; public Response(Calendar calendar) { this.calendar = calendar; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index c9da8aa4dd579..fe26cdb0377fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -156,14 +156,12 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DataFrameAnalyticsConfig config; + private final DataFrameAnalyticsConfig config; public Response(DataFrameAnalyticsConfig config) { this.config = config; } - Response() {} - public Response(StreamInput in) 
throws IOException { super(in); config = new DataFrameAnalyticsConfig(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 67b1b2f9087e3..c234b24be5a9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -83,7 +83,7 @@ public int hashCode() { public static class Response extends ActionResponse implements ToXContentObject { - private DatafeedConfig datafeed; + private final DatafeedConfig datafeed; public Response(DatafeedConfig datafeed) { this.datafeed = datafeed; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 50216b72f20d6..5b8dae53840b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -99,9 +99,7 @@ public boolean equals(Object obj) { public static class Response extends ActionResponse implements ToXContentObject { - private MlFilter filter; - - Response() {} + private final MlFilter filter; Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java index dd356b8ab41ff..a5c8e10496b3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java @@ -30,12 +30,10 @@ public final class DelegatePkiAuthenticationResponse extends ActionResponse impl private static final ParseField EXPIRES_IN_FIELD = new ParseField("expires_in"); private static final ParseField AUTHENTICATION = new ParseField("authentication"); - private String accessToken; - private TimeValue expiresIn; + private final String accessToken; + private final TimeValue expiresIn; private Authentication authentication; - DelegatePkiAuthenticationResponse() {} - public DelegatePkiAuthenticationResponse(String accessToken, TimeValue expiresIn, Authentication authentication) { this.accessToken = Objects.requireNonNull(accessToken); // always store expiration in seconds because this is how we "serialize" to JSON and we need to parse back diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java index a1ed1c6092df8..334b395a05b31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ToXContentObject; @@ -24,11 +23,6 @@ public UpdateApiKeyResponse(boolean updated) { this.updated = updated; } - public UpdateApiKeyResponse(StreamInput in) throws IOException { - super(in); - this.updated = in.readBoolean(); - } - public boolean isUpdated() { return updated; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java index 936a2892a6dbe..92b27826e8759 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -16,11 +15,11 @@ import java.io.IOException; public class OpenIdConnectAuthenticateResponse extends ActionResponse { - private String principal; - private String accessTokenString; - private String refreshTokenString; - private TimeValue expiresIn; - private Authentication authentication; + private final String principal; + private final String accessTokenString; + private final String refreshTokenString; + private final TimeValue expiresIn; + private final Authentication authentication; public OpenIdConnectAuthenticateResponse( Authentication authentication, @@ -36,17 +35,6 @@ public OpenIdConnectAuthenticateResponse( this.authentication = authentication; } - public OpenIdConnectAuthenticateResponse(StreamInput in) throws IOException { - super(in); - principal = in.readString(); - accessTokenString = in.readString(); - refreshTokenString = in.readString(); - expiresIn = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } - } - public String getPrincipal() { return principal; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java index 3dbfccf418c10..681ba15896778 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectLogoutResponse.java @@ -7,19 +7,13 @@ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; public final class OpenIdConnectLogoutResponse extends ActionResponse { - private String endSessionUrl; - - public OpenIdConnectLogoutResponse(StreamInput in) throws IOException { - super(in); - this.endSessionUrl = in.readString(); - } + private final String endSessionUrl; public OpenIdConnectLogoutResponse(String endSessionUrl) { this.endSessionUrl = endSessionUrl; diff --git 
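The OpenIdConnect constructors deleted above show the version-gated read idiom: a field added in a later release (here the Authentication object, gated on V_7_11_0) is read only when the sending node is known to have written it, otherwise the stream would be over-read against an older sender. A sketch of that guard, assuming integer version ids and plain java.io in place of the transport layer (all names hypothetical):

    import java.io.DataInput;
    import java.io.IOException;

    final class VersionGatedRead {
        static final int V_7_11_0 = 7_11_00; // hypothetical stand-in for TransportVersions.V_7_11_0

        record AuthResult(String principal, String accessToken, String authentication) {}

        static AuthResult read(DataInput in, int senderVersion) throws IOException {
            String principal = in.readUTF();
            String accessToken = in.readUTF();
            // only consume the newer field when the sender actually wrote it
            String authentication = senderVersion >= V_7_11_0 ? in.readUTF() : null;
            return new AuthResult(principal, accessToken, authentication);
        }
    }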
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index 88d8de80fe7a1..5dcfadd3dd01c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,19 +20,19 @@ */ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse implements ToXContentObject { - private String authenticationRequestUrl; + private final String authenticationRequestUrl; /* * The oAuth2 state parameter used for CSRF protection. */ - private String state; + private final String state; /* * String value used to associate a Client session with an ID Token, and to mitigate replay attacks. */ - private String nonce; + private final String nonce; /* * String value: name of the realm used to perform authentication. */ - private String realmName; + private final String realmName; public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state, String nonce, String realmName) { this.authenticationRequestUrl = authorizationEndpointUrl; @@ -42,16 +41,6 @@ public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUr this.realmName = realmName; } - public OpenIdConnectPrepareAuthenticationResponse(StreamInput in) throws IOException { - super(in); - authenticationRequestUrl = in.readString(); - state = in.readString(); - nonce = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - realmName = in.readString(); - } - } - public String getAuthenticationRequestUrl() { return authenticationRequestUrl; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java index 5f8755ef0c0da..7db9b26cfaa60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java @@ -20,7 +20,7 @@ */ public final class GetPrivilegesResponse extends ActionResponse { - private ApplicationPrivilegeDescriptor[] privileges; + private final ApplicationPrivilegeDescriptor[] privileges; public GetPrivilegesResponse(ApplicationPrivilegeDescriptor... 
privileges) { this.privileges = Objects.requireNonNull(privileges, "Application privileges cannot be null"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java index 8b8f905e59cbf..9d031d7c9065b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,11 +22,6 @@ public ActivateProfileResponse(Profile profile) { this.profile = profile; } - public ActivateProfileResponse(StreamInput in) throws IOException { - super(in); - this.profile = new Profile(in); - } - public Profile getProfile() { return profile; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java index 2dbf6743a5fde..77a411ad477f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfilesResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,12 +28,6 @@ public GetProfilesResponse(List profiles, Map errors this.errors = Objects.requireNonNull(errors); } - public GetProfilesResponse(StreamInput in) throws IOException { - super(in); - this.profiles = in.readCollectionAsImmutableList(Profile::new); - this.errors = in.readMap(StreamInput::readException); - } - public List getProfiles() { return profiles; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0574bb4b100a5..6eaeb4f02ac7f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -30,13 +30,6 @@ public SuggestProfilesResponse(ProfileHit[] profileHits, long tookInMillis, Tota this.totalHits = totalHits; } - public SuggestProfilesResponse(StreamInput in) throws IOException { - super(in); - this.profileHits = in.readArray(ProfileHit::new, ProfileHit[]::new); - this.tookInMillis = in.readVLong(); - this.totalHits = Lucene.readTotalHits(in); - } - public ProfileHit[] getProfileHits() { return profileHits; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java index 42b672cca6ad8..807c55643b425 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.role; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -20,12 +19,7 @@ */ public class PutRoleResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutRoleResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutRoleResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java index 12393213fa740..87e7f3785015f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -22,11 +21,6 @@ public class DeleteRoleMappingResponse extends ActionResponse implements ToXCont private boolean found = false; - public DeleteRoleMappingResponse(StreamInput in) throws IOException { - super(in); - found = in.readBoolean(); - } - public DeleteRoleMappingResponse(boolean found) { this.found = found; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java index 51689af1d7bc6..13a751829797f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -20,16 +19,7 @@ */ public class GetRoleMappingsResponse extends ActionResponse { - private ExpressionRoleMapping[] mappings; - - public GetRoleMappingsResponse(StreamInput in) throws IOException { - super(in); - int size = in.readVInt(); - mappings = new ExpressionRoleMapping[size]; - for (int i = 0; i < size; i++) { - mappings[i] = new ExpressionRoleMapping(in); - } - } + private final 
ExpressionRoleMapping[] mappings; public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) { this.mappings = mappings; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java index d04b0bbe1195f..5a80736dab66d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.rolemapping; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,12 +20,7 @@ */ public class PutRoleMappingResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutRoleMappingResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutRoleMappingResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java index 2cb0a76c2d6bf..71b5e93e60a2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -21,26 +20,12 @@ */ public final class SamlAuthenticateResponse extends ActionResponse { - private String principal; - private String tokenString; - private String refreshToken; - private String realm; - private TimeValue expiresIn; - private Authentication authentication; - - public SamlAuthenticateResponse(StreamInput in) throws IOException { - super(in); - principal = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - realm = in.readString(); - } - tokenString = in.readString(); - refreshToken = in.readString(); - expiresIn = in.readTimeValue(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } - } + private final String principal; + private final String tokenString; + private final String refreshToken; + private final String realm; + private final TimeValue expiresIn; + private final Authentication authentication; public SamlAuthenticateResponse(Authentication authentication, String tokenString, String refreshToken, TimeValue expiresIn) { this.principal = authentication.getEffectiveSubject().getUser().principal(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java index 097d38c4f886f..42956cb34033d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,16 +16,9 @@ */ public final class SamlInvalidateSessionResponse extends ActionResponse { - private String realmName; - private int count; - private String redirectUrl; - - public SamlInvalidateSessionResponse(StreamInput in) throws IOException { - super(in); - realmName = in.readString(); - count = in.readInt(); - redirectUrl = in.readString(); - } + private final String realmName; + private final int count; + private final String redirectUrl; public SamlInvalidateSessionResponse(String realmName, int count, String redirectUrl) { this.realmName = realmName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java index 0c94e9a372481..8c3e8bd64b9bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -20,12 +19,6 @@ public final class SamlLogoutResponse extends ActionResponse { private final String requestId; private final String redirectUrl; - public SamlLogoutResponse(StreamInput in) throws IOException { - super(in); - requestId = in.readString(); - redirectUrl = in.readString(); - } - public SamlLogoutResponse(String requestId, String redirectUrl) { this.requestId = requestId; this.redirectUrl = redirectUrl; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java index 19f50266e5a51..9c7539361837e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,14 +16,9 @@ */ public final class SamlPrepareAuthenticationResponse extends ActionResponse { - private String realmName; - private String requestId; - private String redirectUrl; - - public SamlPrepareAuthenticationResponse(StreamInput in) throws IOException { - super(in); - redirectUrl = 
in.readString(); - } + private final String realmName; + private final String requestId; + private final String redirectUrl; public SamlPrepareAuthenticationResponse(String realmName, String requestId, String redirectUrl) { this.realmName = realmName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java index f0cce0ef5e675..b47c47d4d83d4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlSpMetadataResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.saml; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,12 +20,7 @@ public String getXMLString() { return XMLString; } - private String XMLString; - - public SamlSpMetadataResponse(StreamInput in) throws IOException { - super(in); - XMLString = in.readString(); - } + private final String XMLString; public SamlSpMetadataResponse(String XMLString) { this.XMLString = XMLString; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java index 5443deac03bd9..c8b7a1ea04e36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/service/DeleteServiceAccountTokenResponse.java @@ -18,7 +18,7 @@ public class DeleteServiceAccountTokenResponse extends ActionResponse implements ToXContentObject { - private boolean found; + private final boolean found; public DeleteServiceAccountTokenResponse(boolean found) { this.found = found; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java index 73719c7cae489..30522e3389a8a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java @@ -25,15 +25,13 @@ */ public final class CreateTokenResponse extends ActionResponse implements ToXContentObject { - private String tokenString; - private TimeValue expiresIn; - private String scope; - private String refreshToken; - private String kerberosAuthenticationResponseToken; + private final String tokenString; + private final TimeValue expiresIn; + private final String scope; + private final String refreshToken; + private final String kerberosAuthenticationResponseToken; private Authentication authentication; - CreateTokenResponse() {} - public CreateTokenResponse(StreamInput in) throws IOException { super(in); tokenString = in.readString(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java index 4b07a3db7a038..ec34d54b0d56e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.user; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -20,12 +19,7 @@ */ public class DeleteUserResponse extends ActionResponse implements ToXContentObject { - private boolean found; - - public DeleteUserResponse(StreamInput in) throws IOException { - super(in); - found = in.readBoolean(); - } + private final boolean found; public DeleteUserResponse(boolean found) { this.found = found; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java index 6395d2a090afa..c7f51b21f4920 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersResponse.java @@ -8,13 +8,11 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.user.InternalUser; import org.elasticsearch.xpack.core.security.user.User; import java.io.IOException; @@ -30,30 +28,6 @@ public class GetUsersResponse extends ActionResponse implements ToXContentObject @Nullable private final Map profileUidLookup; - public GetUsersResponse(StreamInput in) throws IOException { - super(in); - int size = in.readVInt(); - if (size < 0) { - users = null; - } else { - users = new User[size]; - for (int i = 0; i < size; i++) { - final User user = Authentication.AuthenticationSerializationHelper.readUserFrom(in); - assert false == user instanceof InternalUser : "should not get internal user [" + user + "]"; - users[i] = user; - } - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) { - if (in.readBoolean()) { - profileUidLookup = in.readMap(StreamInput::readString); - } else { - profileUidLookup = null; - } - } else { - profileUidLookup = null; - } - } - public GetUsersResponse(Collection users) { this(users, null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java index e59f588ffd65c..6c83d4b38ff89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java @@ -27,11 +27,11 @@ * Response for a {@link 
HasPrivilegesRequest} */ public class HasPrivilegesResponse extends ActionResponse implements ToXContentObject { - private String username; - private boolean completeMatch; - private Map cluster; - private Set index; - private Map> application; + private final String username; + private final boolean completeMatch; + private final Map cluster; + private final Set index; + private final Map> application; public HasPrivilegesResponse() { this("", true, Collections.emptyMap(), Collections.emptyList(), Collections.emptyMap()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java index 8e8ff50e5b4ac..9977ad459b8fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ProfileHasPrivilegesResponse.java @@ -21,7 +21,7 @@ public class ProfileHasPrivilegesResponse extends ActionResponse implements ToXContentObject { - private Set hasPrivilegeUids; + private final Set hasPrivilegeUids; private final Map errors; public ProfileHasPrivilegesResponse(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java index 86a25f8321176..fb6e699cd34c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.user; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,12 +20,7 @@ */ public class PutUserResponse extends ActionResponse implements ToXContentObject { - private boolean created; - - public PutUserResponse(StreamInput in) throws IOException { - super(in); - this.created = in.readBoolean(); - } + private final boolean created; public PutUserResponse(boolean created) { this.created = created; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index cbb747272eebc..32d9725a909c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; /** @@ -52,16 +51,7 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse implements ToXContentObject { - private Collection certificates; - - public Response(StreamInput in) throws IOException { - super(in); - this.certificates = new ArrayList<>(); - int count = in.readVInt(); - for (int i = 0; i < count; i++) { - 
certificates.add(new CertificateInfo(in)); - } - } + private final Collection certificates; public Response(Collection certificates) { this.certificates = certificates; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java index 97f4b7d619191..60d20046c4f29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.ack; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; @@ -20,12 +19,7 @@ */ public class AckWatchResponse extends ActionResponse { - private WatchStatus status; - - public AckWatchResponse(StreamInput in) throws IOException { - super(in); - status = in.readBoolean() ? new WatchStatus(in) : null; - } + private final WatchStatus status; public AckWatchResponse(@Nullable WatchStatus status) { this.status = status; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java index 8091ba3b5ca26..8b0717c9855ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.activate; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; @@ -20,12 +19,7 @@ */ public class ActivateWatchResponse extends ActionResponse { - private WatchStatus status; - - public ActivateWatchResponse(StreamInput in) throws IOException { - super(in); - status = in.readBoolean() ? 
new WatchStatus(in) : null; - } + private final WatchStatus status; public ActivateWatchResponse(@Nullable WatchStatus status) { this.status = status; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java index 261a31211e497..cdb4503945904 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -26,12 +25,6 @@ public class ExecuteWatchResponse extends ActionResponse implements ToXContentOb private final String recordId; private final XContentSource recordSource; - public ExecuteWatchResponse(StreamInput in) throws IOException { - super(in); - recordId = in.readString(); - recordSource = XContentSource.readFrom(in); - } - public ExecuteWatchResponse(String recordId, BytesReference recordSource, XContentType contentType) { this.recordId = recordId; this.recordSource = new XContentSource(recordSource, contentType); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java index d1da1cc490f4b..789925f3832ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchResponse.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -22,32 +21,13 @@ public class GetWatchResponse extends ActionResponse implements ToXContentObject { - private String id; - private WatchStatus status; - private boolean found; - private XContentSource source; - private long version; - private long seqNo; - private long primaryTerm; - - public GetWatchResponse(StreamInput in) throws IOException { - super(in); - id = in.readString(); - found = in.readBoolean(); - if (found) { - status = new WatchStatus(in); - source = XContentSource.readFrom(in); - version = in.readZLong(); - seqNo = in.readZLong(); - primaryTerm = in.readVLong(); - } else { - status = null; - source = null; - version = Versions.NOT_FOUND; - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; - } - } + private final String id; + private final WatchStatus status; + private final boolean found; + private final XContentSource source; + private final long version; + private final long seqNo; + private final long primaryTerm; /** * ctor for missing watch diff --git 
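The Watcher constructors deleted just above (AckWatchResponse, ActivateWatchResponse, and GetWatchResponse with its found flag) all rely on the optional-writeable idiom: a boolean presence flag precedes the value so that a @Nullable status round-trips safely. Stripped down to plain java.io (helper names hypothetical):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class OptionalWire {
        static void writeOptional(DataOutput out, String value) throws IOException {
            out.writeBoolean(value != null); // presence flag first
            if (value != null) {
                out.writeUTF(value);
            }
        }

        static String readOptional(DataInput in) throws IOException {
            // mirrors "in.readBoolean() ? new WatchStatus(in) : null" in the deleted code
            return in.readBoolean() ? in.readUTF() : null;
        }
    }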
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java index 249cf66e39458..f7e6f166cf53f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/GetQueryRulesetAction.java @@ -112,7 +112,6 @@ public static Request parse(XContentParser parser, String name) { public static class Response extends ActionResponse implements ToXContentObject { private final QueryRuleset queryRuleset; - private static final ParseField QUERY_RULESET_FIELD = new ParseField("queryRuleset"); public Response(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java index 7bba867a74761..f4c26f5bcd094 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetSecretResponse.java @@ -19,7 +19,7 @@ public class GetSecretResponse extends ActionResponse implements ToXContentObject { - private String id; + private final String id; private final String value; public GetSecretResponse(StreamInput in) throws IOException { diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java index a7cd9c606b3c6..c0a5157557f58 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlInitiateSingleSignOnResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.XContentBuilder; @@ -16,20 +15,11 @@ public class SamlInitiateSingleSignOnResponse extends ActionResponse { - private String postUrl; - private String samlResponse; - private String entityId; - private String samlStatus; - private String error; - - public SamlInitiateSingleSignOnResponse(StreamInput in) throws IOException { - super(in); - this.entityId = in.readString(); - this.postUrl = in.readString(); - this.samlResponse = in.readString(); - this.samlStatus = in.readString(); - this.error = in.readOptionalString(); - } + private final String postUrl; + private final String samlResponse; + private final String entityId; + private final String samlStatus; + private final String error; public SamlInitiateSingleSignOnResponse( String entityId, diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java index 8e8a18f862bd7..6f1fb3cc32193 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java +++ 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlMetadataResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -17,11 +16,6 @@ public class SamlMetadataResponse extends ActionResponse { private final String xmlString; - public SamlMetadataResponse(StreamInput in) throws IOException { - super(in); - this.xmlString = in.readString(); - } - public SamlMetadataResponse(String xmlString) { this.xmlString = Objects.requireNonNull(xmlString, "Metadata XML string must be provided"); } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java index e2b32c7e7023c..b09abb190ef7c 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/SamlValidateAuthnRequestResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.idp.action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,14 +20,6 @@ public class SamlValidateAuthnRequestResponse extends ActionResponse { private final boolean forceAuthn; private final Map authnState; - public SamlValidateAuthnRequestResponse(StreamInput in) throws IOException { - super(in); - this.spEntityId = in.readString(); - this.assertionConsumerService = in.readString(); - this.forceAuthn = in.readBoolean(); - this.authnState = in.readGenericMap(); - } - public SamlValidateAuthnRequestResponse(String spEntityId, String acs, boolean forceAuthn, Map authnState) { this.spEntityId = Objects.requireNonNull(spEntityId, "spEntityId is required for successful responses"); this.assertionConsumerService = Objects.requireNonNull(acs, "ACS is required for successful responses"); diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java index 62ae0d54eb584..b215a724c06f3 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java @@ -20,7 +20,7 @@ * Response for the sql action for translating SQL queries into ES requests */ public class SqlTranslateResponse extends ActionResponse implements ToXContentObject { - private SearchSourceBuilder source; + private final SearchSourceBuilder source; public SqlTranslateResponse(StreamInput in) throws IOException { super(in); From 436ffe7255b0201f535a0b69a63d097dd1665ec3 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:50:06 +0300 Subject: [PATCH 075/173] Re-enable unittest with additional tracing (#107202) Related to #105437 --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 2aff5257a6ebf..fc3e46882ef84 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -191,7 +191,7 @@ private void createIndex(String index, String alias, boolean isTimeSeries) throw createIndexWithSettings(client(), index, alias, settings, mapping); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105437") + @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105437") public void testRollupIndex() throws Exception { createIndex(index, alias, true); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); From cf8fe17766238f69bfc1ad0436b9d71adefdb981 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 8 Apr 2024 12:52:59 +0200 Subject: [PATCH 076/173] ES|QL: Make some REST tests deterministic (#107200) Adding an explicit SORT to two tests to make them more deterministic, especially for scenarios where the execution is on multiple nodes. --- .../elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java | 2 +- .../org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index 752775b20b0e3..e04435b715c99 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -167,7 +167,7 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { public void testMatchField_ImplicitFieldsList() throws IOException { Map result = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number") + new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number | sort number") ); var columns = List.of(Map.of("name", "number", "type", "long")); var values = List.of(List.of(1000), List.of(1000), List.of(5000)); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index b67432f491cf3..ab288de4ad27d 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -322,7 +322,7 @@ public void testNullInAggs() throws IOException { matchesMap().entry("values", List.of(List.of(1))).entry("columns", List.of(Map.of("name", "min(value)", "type", "long"))) ); - builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group"); + builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`"); result = 
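The fix above leans on the fact that an explicit sort is the only row-ordering guarantee a distributed ES|QL query gives: without "| sort number", the three enriched rows may legally arrive in any node order, so the equality assertion on values flakes. A toy illustration of the same point outside the ES|QL runtime, reusing the expected values from testMatchField_ImplicitFieldsList (class name hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    final class SortBeforeAssert {
        static void check(List<Integer> rowsInArrivalOrder) {
            List<Integer> rows = new ArrayList<>(rowsInArrivalOrder);
            rows.sort(null); // natural order, mirroring "| sort number" in the query
            if (!rows.equals(List.of(1000, 1000, 5000))) {
                throw new AssertionError("unexpected rows: " + rows);
            }
        }

        public static void main(String[] args) {
            check(List.of(5000, 1000, 1000)); // passes for any arrival order
        }
    }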
runEsql(builder); assertMap( result, From 9496fa37469d543d73e558bdcf0d710d63d5833b Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 8 Apr 2024 07:24:52 -0400 Subject: [PATCH 077/173] Tidy up watcher logging and tests (#107175) --- .../smoketest/DocsClientYamlTestSuiteIT.java | 4 +- .../rest/yaml/ClientYamlTestResponse.java | 5 ++- .../rest/yaml/ESClientYamlSuiteTestCase.java | 3 +- .../EsqlClientYamlAsyncSubmitAndFetchIT.java | 2 +- .../xpack/watcher/WatcherRestTestCase.java | 8 ++-- .../watcher/WatcherYamlSuiteTestCase.java | 38 ++++++++----------- .../watcher/WatcherIndexingListener.java | 4 +- .../watcher/WatcherLifeCycleService.java | 3 +- .../smoketest/WatcherJiraYamlTestSuiteIT.java | 25 ++++++------ .../WatcherPagerDutyYamlTestSuiteIT.java | 25 ++++++------ .../WatcherSlackYamlTestSuiteIT.java | 25 ++++++------ 11 files changed, 63 insertions(+), 79 deletions(-) diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 1d6df60df0f88..6191f33f1c5dd 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -251,7 +251,7 @@ public void reenableWatcher() throws Exception { if (isWatcherTest()) { assertBusy(() -> { ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": @@ -261,7 +261,7 @@ public void reenableWatcher() throws Exception { emptyList(), emptyMap() ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); + boolean isAcknowledged = startResponse.evaluate("acknowledged"); assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); case "stopping": diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index b09250e1527f3..8a6dada181c6c 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -149,8 +149,9 @@ public boolean isError() { /** * Parses the response body and extracts a specific value from it (identified by the provided path) */ - public Object evaluate(String path) throws IOException { - return evaluate(path, Stash.EMPTY); + @SuppressWarnings("unchecked") + public T evaluate(String path) throws IOException { + return (T) evaluate(path, Stash.EMPTY); } /** diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 5ac83f94f6248..a32679d445629 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -469,8 +469,7 @@ static String readOsFromNodesInfo(RestClient restClient) throws IOException { ClientYamlTestResponse restTestResponse = new 
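The ClientYamlTestResponse change above replaces the Object-returning evaluate(String path) with a generic overload that performs the single unchecked cast internally; call sites then drop their explicit casts and let target typing pick the result type, as the watcher test cleanups below show. The trick in isolation (map contents hypothetical):

    import java.util.Map;

    final class GenericEvaluate {
        private static final Map<String, Object> RESPONSE =
            Map.of("stats.0.watcher_state", "started", "stats.0.watch_count", 0);

        @SuppressWarnings("unchecked") // the one unchecked cast lives here, not at every call site
        static <T> T evaluate(String path) {
            return (T) RESPONSE.get(path);
        }

        public static void main(String[] args) {
            String state = evaluate("stats.0.watcher_state"); // T inferred as String
            int watchCount = evaluate("stats.0.watch_count"); // T inferred as Integer, then unboxed
            System.out.println(state + " / " + watchCount);
        }
    }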
ClientYamlTestResponse(response); SortedSet osPrettyNames = new TreeSet<>(); - @SuppressWarnings("unchecked") - final Map nodes = (Map) restTestResponse.evaluate("nodes"); + final Map nodes = restTestResponse.evaluate("nodes"); for (Entry node : nodes.entrySet()) { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index 0f2bf2703f62f..b32a7385d12c5 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -71,7 +71,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx original.getApiCallSection().getNodeSelector() ); - String id = (String) startResponse.evaluate("id"); + String id = startResponse.evaluate("id"); boolean finishedEarly = id == null; if (finishedEarly) { /* diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java index 341e92641f641..19f1133e4f14f 100644 --- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java +++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherRestTestCase.java @@ -11,9 +11,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; -import org.hamcrest.Matchers; import org.junit.After; -import org.junit.Assert; import org.junit.Before; import java.io.IOException; @@ -21,6 +19,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + /** * Parent test class for Watcher (not-YAML) based REST tests */ @@ -36,7 +36,7 @@ public final void startWatcher() throws Exception { case "stopped": Response startResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_start")); boolean isAcknowledged = ObjectPath.createFromResponse(startResponse).evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); case "stopping": throw new AssertionError("waiting until stopping state reached stopped state to start again"); @@ -68,7 +68,7 @@ public final void stopWatcher() throws Exception { case "started": Response stopResponse = ESRestTestCase.adminClient().performRequest(new Request("POST", "/_watcher/_stop")); boolean isAcknowledged = ObjectPath.createFromResponse(stopResponse).evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until started state reached stopped state"); default: throw new AssertionError("unknown state[" + state + "]"); diff --git a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java 
b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java index c7b1e0d0fcbee..ddcf976c84572 100644 --- a/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java +++ b/x-pack/plugin/watcher/qa/common/src/main/java/org/elasticsearch/xpack/watcher/WatcherYamlSuiteTestCase.java @@ -13,16 +13,15 @@ import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.hamcrest.Matchers; import org.junit.After; -import org.junit.Assert; import org.junit.Before; +import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.watcher.WatcherRestTestCase.deleteAllWatcherData; +import static org.hamcrest.Matchers.is; /** * Parent test class for Watcher YAML based REST tests @@ -40,25 +39,25 @@ public static Iterable parameters() throws Exception { @Before public final void startWatcher() throws Exception { ESTestCase.assertBusy(() -> { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped" -> { ClientYamlTestResponse startResponse = getAdminExecutionContext().callApi( "watcher.start", - emptyMap(), - emptyList(), - emptyMap() + Map.of(), + List.of(), + Map.of() ); - boolean isAcknowledged = (boolean) startResponse.evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + boolean isAcknowledged = startResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until stopped state reached started state"); } case "stopping" -> throw new AssertionError("waiting until stopping state reached stopped state to start again"); case "starting" -> throw new AssertionError("waiting until starting state reached started state"); case "started" -> { - int watcherCount = (int) response.evaluate("stats.0.watch_count"); + int watcherCount = response.evaluate("stats.0.watch_count"); if (watcherCount > 0) { logger.info("expected 0 active watches, but got [{}], deleting watcher indices again", watcherCount); deleteAllWatcherData(); @@ -73,8 +72,8 @@ public final void startWatcher() throws Exception { @After public final void stopWatcher() throws Exception { ESTestCase.assertBusy(() -> { - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); switch (state) { case "stopped": // all good here, we are done @@ -84,14 +83,9 @@ public final void stopWatcher() throws Exception { case "starting": throw new AssertionError("waiting until starting state reached started state to stop"); case "started": - ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi( - "watcher.stop", - emptyMap(), 
- emptyList(), - emptyMap() - ); - boolean isAcknowledged = (boolean) stopResponse.evaluate("acknowledged"); - Assert.assertThat(isAcknowledged, Matchers.is(true)); + ClientYamlTestResponse stopResponse = getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + boolean isAcknowledged = stopResponse.evaluate("acknowledged"); + assertThat(isAcknowledged, is(true)); throw new AssertionError("waiting until started state reached stopped state"); default: throw new AssertionError("unknown state[" + state + "]"); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 7e16a0353f2cd..e77c7aba6824d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -141,7 +141,7 @@ public void postIndex(ShardId shardId, Engine.Index operation, Engine.IndexResul logger.debug("adding watch [{}] to trigger service", watch.id()); triggerService.add(watch); } else { - logger.debug("removing watch [{}] to trigger service", watch.id()); + logger.debug("removing watch [{}] from trigger service", watch.id()); triggerService.remove(watch.id()); } } else { @@ -179,7 +179,7 @@ public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { if (isWatchDocument(shardId.getIndexName())) { - logger.debug("removing watch [{}] to trigger service via delete", delete.id()); + logger.debug("removing watch [{}] from trigger service via delete", delete.id()); triggerService.remove(delete.id()); } return delete; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index f6e34ccb243c8..cd0e066de2eaf 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -44,7 +44,7 @@ public class WatcherLifeCycleService implements ClusterStateListener { private final AtomicReference state = new AtomicReference<>(WatcherState.STARTED); private final AtomicReference> previousShardRoutings = new AtomicReference<>(Collections.emptyList()); private volatile boolean shutDown = false; // indicates that the node has been shutdown and we should never start watcher after this. - private volatile WatcherService watcherService; + private final WatcherService watcherService; private final EnumSet stopStates = EnumSet.of(WatcherState.STOPPED, WatcherState.STOPPING); WatcherLifeCycleService(ClusterService clusterService, WatcherService watcherService) { @@ -123,7 +123,6 @@ public void clusterChanged(ClusterChangedEvent event) { } else { logger.info("watcher has not been stopped. 
not currently in a stopping state, current state [{}]", state.get()); } - }); } } diff --git a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java index f8977f8d3cf8d..250920382719a 100644 --- a/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java +++ b/x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); diff --git a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java index 10352b54912e5..a8e522f3836fb 100644 --- a/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java +++ 
b/x-pack/qa/third-party/pagerduty/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherPagerDutyYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); diff --git a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java index 5c083cb90cd69..9cb64bab89d34 100644 --- a/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java +++ b/x-pack/qa/third-party/slack/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherSlackYamlTestSuiteIT.java @@ -17,13 +17,10 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.is; /** Runs rest tests 
against external cluster */ @@ -40,23 +37,23 @@ public static Iterable parameters() throws Exception { @Before public void startWatcher() throws Exception { - final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); + final List watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM); assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.start", emptyMap(), emptyList(), emptyMap()); + getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of()); for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi( "indices.exists_template", - singletonMap("name", template), - emptyList(), - emptyMap() + Map.of("name", template), + List.of(), + Map.of() ); assertThat(templateExistsResponse.getStatusCode(), is(200)); } - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("started")); } catch (IOException e) { throw new AssertionError(e); @@ -68,9 +65,9 @@ public void startWatcher() throws Exception { public void stopWatcher() throws Exception { assertBusy(() -> { try { - getAdminExecutionContext().callApi("watcher.stop", emptyMap(), emptyList(), emptyMap()); - ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", emptyMap(), emptyList(), emptyMap()); - String state = (String) response.evaluate("stats.0.watcher_state"); + getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of()); + ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of()); + String state = response.evaluate("stats.0.watcher_state"); assertThat(state, is("stopped")); } catch (IOException e) { throw new AssertionError(e); From 53ec28d0c99130ed55693c2164d1d67b703fb563 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 8 Apr 2024 15:36:17 +0300 Subject: [PATCH 078/173] [TEST] Accept "failed" as status in total downsample latency metric (#107207) * Update 8.13 release notes with known issue * revert unintended * reword * reword * reword * Add rolling update test for downsampling * limit testing to 8.10+ * add retry logic for querying * remove unused variable * check all search results * minor refactor * spotless * Add full cluster restart test for downsampling * Accept "failed" as status in total latency metric. 
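A note on the pattern every watcher suite above keeps restating: poll `watcher.stats`, switch on the reported state, trigger a transition when one is legal, and throw `AssertionError` so the surrounding `assertBusy` retries until the target state is observed. Below is a minimal, self-contained sketch of that state machine. `WatcherClient` and its two methods are hypothetical stand-ins for the YAML/REST clients used above, and this `assertBusy` is a simplified polling loop rather than the real `ESTestCase` helper:

```java
import java.util.concurrent.TimeUnit;

public class WatcherStartPolling {

    /** Hypothetical stand-in for the watcher.stats / watcher.start APIs used in the tests above. */
    interface WatcherClient {
        String state();   // "stopped", "stopping", "starting" or "started"
        boolean start();  // returns the "acknowledged" flag of the start request
    }

    static void startAndAwaitStarted(WatcherClient client) throws InterruptedException {
        assertBusy(() -> {
            switch (client.state()) {
                case "stopped" -> {
                    if (client.start() == false) {
                        throw new AssertionError("start request was not acknowledged");
                    }
                    // an acknowledged start request is not the same as being started: retry
                    throw new AssertionError("waiting until stopped state reached started state");
                }
                case "stopping" -> throw new AssertionError("waiting until stopping state reached stopped state to start again");
                case "starting" -> throw new AssertionError("waiting until starting state reached started state");
                case "started" -> { /* target state reached, assertBusy returns */ }
                default -> throw new AssertionError("unknown state[" + client.state() + "]");
            }
        }, 30, TimeUnit.SECONDS);
    }

    /** Simplified assertBusy: rerun the body until it stops throwing or the timeout elapses. */
    static void assertBusy(Runnable body, long maxWait, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(maxWait);
        while (true) {
            try {
                body.run();
                return;
            } catch (AssertionError e) {
                if (System.nanoTime() >= deadline) {
                    throw e;
                }
                Thread.sleep(100); // the real ESTestCase.assertBusy backs off instead of sleeping a fixed interval
            }
        }
    }
}
```

Throwing to force a retry keeps each poll self-contained: the only terminal outcomes are observing "started" or exhausting the timeout.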
--- .../xpack/downsample/DownsampleActionSingleNodeTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 3c4be50b25a73..2057518307fc0 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -1186,7 +1186,7 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000 ); assertEquals(1, measurement.attributes().size()); - assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration"))); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration", "failed"))); } }, 10, TimeUnit.SECONDS); } From bdf9c605b5535835f25a8dcf0d79f4f090f8c322 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:53:30 +0100 Subject: [PATCH 079/173] ES|QL fix no-length substring with supplementary (4-byte) character (#107183) This commit fixes a bug in the no-length substring variant with supplementary (4-byte) characters. --- docs/changelog/107183.yaml | 5 +++++ .../src/main/resources/string.csv-spec | 9 ++++++++- .../function/scalar/string/Substring.java | 9 +++------ .../function/scalar/string/SubstringTests.java | 14 ++++++++++++++ 4 files changed, 30 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/107183.yaml diff --git a/docs/changelog/107183.yaml b/docs/changelog/107183.yaml new file mode 100644 index 0000000000000..226d036456858 --- /dev/null +++ b/docs/changelog/107183.yaml @@ -0,0 +1,5 @@ +pr: 107183 +summary: ES|QL fix no-length substring with supplementary (4-byte) character +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index f22e1b2de7f6a..aeb87ea5b66f4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -172,6 +172,13 @@ emp_no:integer | last_name:keyword | x:keyword | z:keyword 10010 | Piveteau | P | a ; +substring Emoji#[skip:-8.13.99,reason:bug fix in 8.14] +row a = "🐱Meow!🐶Woof!" | eval sub1 = substring(a, 2) | eval sub2 = substring(a, 2, 100); + +a:keyword | sub1:keyword | sub2:keyword +🐱Meow!🐶Woof! | Meow!🐶Woof! | Meow!🐶Woof! +; + ltrim from employees | sort emp_no | limit 10 | eval name = concat(" ", first_name, " ") | eval name = ltrim(name) | eval name = concat("'", name, "'") | keep emp_no, name; @@ -1236,7 +1243,7 @@ emp_no:integer | last_name:keyword | f_s:keyword | f_l:integer ; locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] -row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 3) | eval f_l = locate(a, f_s); +row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 2) | eval f_l = locate(a, f_s); a:keyword | f_s:keyword | f_l:integer 🐱Meow!🐶Woof! | Meow!🐶Woof! 
| 3 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 3bd7d660352c3..a1f2586f4faed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -86,12 +86,9 @@ public boolean foldable() { @Evaluator(extraName = "NoLength") static BytesRef process(BytesRef str, int start) { - if (str.length == 0) { - return null; - } - int codePointCount = UnicodeUtil.codePointCount(str); - int indexStart = indexStart(codePointCount, start); - return new BytesRef(str.utf8ToString().substring(indexStart)); + int length = str.length; // we just need a value at least the length of the string + return process(str, start, length); + } @Evaluator diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 4736ba2cc74d7..648fffada6dc1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.function.Supplier; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -161,6 +162,19 @@ public void testUnicode() { assert s.length() == 8 && s.codePointCount(0, s.length()) == 7; assertThat(process(s, 3, 1000), equalTo("tiger")); assertThat(process(s, -6, 1000), equalTo("\ud83c\udf09tiger")); + assert "🐱".length() == 2 && "🐶".length() == 2; + assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; + assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; + + for (Integer len : new Integer[] { null, 100, 100000 }) { + assertThat(process(s, 3, len), equalTo("tiger")); + assertThat(process(s, -6, len), equalTo("\ud83c\udf09tiger")); + + assertThat(process("🐱Meow!🐶Woof!", 0, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 1, len), equalTo("🐱Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 2, len), equalTo("Meow!🐶Woof!")); + assertThat(process("🐱Meow!🐶Woof!", 3, len), equalTo("eow!🐶Woof!")); + } } public void testNegativeLength() { From 9edd67f911c3c9cdd16eecec88ab6d51fdda7fdf Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 8 Apr 2024 13:56:23 +0100 Subject: [PATCH 080/173] Fix ES|QL locate with supplementary (4-byte) character (#107172) This commit fixes the ES|QL locate with supplementary (4-byte) character. 
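This fix and the substring fix in the previous patch share a root cause: `String.indexOf` and `String.substring` are defined over UTF-16 `char` units, while ES|QL's string functions are specified over Unicode code points, and a supplementary character such as an emoji occupies two `char`s. The sketch below illustrates the `codePointCount`/`offsetByCodePoints` arithmetic both fixes lean on, using only standard `java.lang.String` methods:

```java
public class CodePointArithmetic {
    public static void main(String[] args) {
        String s = "🐱Meow!🐶Woof!"; // each emoji is one code point but two Java chars
        System.out.println(s.length());                       // 14 UTF-16 units
        System.out.println(s.codePointCount(0, s.length()));  // 12 user-visible characters

        // A char index from indexOf must be translated back into a code-point index:
        int charIndex = s.indexOf("Meow");                    // 2, because 🐱 occupies chars 0 and 1
        int codePointIndex = s.codePointCount(0, charIndex);  // 1
        System.out.println(1 + codePointIndex);               // 2: the 1-based result LOCATE should return

        // And a code-point offset must be widened before slicing with substring:
        int from = s.offsetByCodePoints(0, 1);                // 2, skipping past the whole emoji
        System.out.println(s.substring(from));                // "Meow!🐶Woof!"
    }
}
```

The pre-fix behavior returned `1 + charIndex`, which is 3 here: exactly the expectation this patch corrects from 3 to 2 in `string.csv-spec`.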
--- .../src/main/resources/string.csv-spec | 2 +- .../function/scalar/string/Locate.java | 6 ++- .../function/scalar/string/LocateTests.java | 53 +++++++++++++++++++ 3 files changed, 59 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index aeb87ea5b66f4..69638ef459805 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1246,7 +1246,7 @@ locateUtf16Emoji#[skip:-8.13.99,reason:new string function added in 8.14] row a = "🐱Meow!🐶Woof!" | eval f_s = substring(a, 2) | eval f_l = locate(a, f_s); a:keyword | f_s:keyword | f_l:integer -🐱Meow!🐶Woof! | Meow!🐶Woof! | 3 +🐱Meow!🐶Woof! | Meow!🐶Woof! | 2 ; locateNestedSubstring#[skip:-8.13.99,reason:new string function added in 8.14] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index a1157fad6c46f..c8b546718aabf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -96,7 +96,11 @@ static int process(BytesRef str, BytesRef substr, int start) { int codePointCount = UnicodeUtil.codePointCount(str); int indexStart = indexStart(codePointCount, start); String utf8ToString = str.utf8ToString(); - return 1 + utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + int idx = utf8ToString.indexOf(substr.utf8ToString(), utf8ToString.offsetByCodePoints(0, indexStart)); + if (idx == -1) { + return 0; + } + return 1 + utf8ToString.codePointCount(0, idx); } @Evaluator(extraName = "NoStart") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index c1d3df53ece60..b95f05039630a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.function.Supplier; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; @@ -131,6 +132,58 @@ public void testExactString() { assertThat(process("界世", "界世", 0), equalTo(1)); } + public void testSupplementaryCharacter() { + // some assertions about the supplementary (4-byte) character we'll use for testing + assert "𠜎".length() == 2; + assert "𠜎".codePointCount(0, 2) == 1; + assert "𠜎".getBytes(UTF_8).length == 4; + + assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); + assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); + assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); + assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); + assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); + + assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); + assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); + assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); + 
assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7));
+        assertThat(process("𠜎a ti𠜎er", "r", 0), equalTo(8));
+
+        // exact
+        assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1));
+        assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1));
+        assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1));
+        assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1));
+
+        // prefix
+        assertThat(process("𠜎abc", "𠜎", 0), equalTo(1));
+        assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1));
+        assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1));
+        assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1));
+        assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1));
+        assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1));
+
+        // suffix
+        assertThat(process("abc𠜎", "𠜎", 0), equalTo(4));
+        assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4));
+        assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4));
+        assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4));
+        assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4));
+
+        // out of range
+        assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0));
+        assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0));
+        assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0));
+
+        assert "🐱".length() == 2 && "🐶".length() == 2;
+        assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1;
+        assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4;
+        assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1));
+        assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2));
+        assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3));
+    }
+
     private Integer process(String str, String substr, Integer start) {
         try (
             EvalOperator.ExpressionEvaluator eval = evaluator(

From 3f998469e22ff51d641e25879b87ec4ec9b39560 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Mon, 8 Apr 2024 09:29:07 -0400
Subject: [PATCH 081/173] Document ESQL's csv-spec files (#107169)

---
 .../testFixtures/src/main/resources/README.md | 178 ++++++++++++++++++
 1 file changed, 178 insertions(+)
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md

diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md
new file mode 100644
index 0000000000000..fdd52c6aac229
--- /dev/null
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md
@@ -0,0 +1,178 @@
+# ESQL's CSV-SPEC Integration Tests
+
+ESQL has lots of different kinds of integration tests! Like the rest of
+Elasticsearch it has YAML tests and Java Rest tests and ESIntegTestCase
+subclasses, but it *also* has CSV-SPEC tests. You can think of them like
+the YAML tests, but they can *only* call _query and assert on the response.
+That simplicity lets us run them in lots of contexts and keeps them *fast*.
+As such, most of ESQL's integration tests are CSV-SPEC tests.
+
+## Running
+
+CSV-SPEC tests run in lots of different ways. The simplest way to run a
+CSV-SPEC test is to open ESQL's CsvTests.java and run it right in IntelliJ using
+the unit runner. As of this writing that runs 1,350 tests in about 35 seconds.
+It's fast because it doesn't stand up an Elasticsearch node at all. It runs
+like a big unit test.
+
+The second-simplest way to run the CSV-SPEC tests is to run `EsqlSpecIT` in
+`:x-pack:plugin:esql:qa:server:single-node` via the Gradle runner in IntelliJ
+or on the command line. That will boot a real Elasticsearch node, create some
+test data, and run the tests.
+The tests are reused in a few more scenarios,
+including multi-node and mixed-cluster.
+
+## Organization
+
+The CSV-SPEC tests grew organically for a long time, but we've since grown
+general organizing principles. But lots of tests don't follow those principles.
+See organic growth. Anyway!
+
+### Files named after types
+
+Basic support for a type, like, say, `integer` or `geo_point`, will live in a
+file named after the type.
+
+* `boolean`
+* `date`
+* `floats` (`double`)
+* `ints` (`integer` and `long`)
+* `ip`
+* `null`
+* `unsigned_long`
+* `version`
+
+Many functions can take lots of different types as input, like `TO_STRING`
+and `VALUES`. Those tests also live in these files.
+
+### Themed functions
+
+Some files are named after groups of functions and contain, unsurprisingly,
+the tests for those functions:
+
+* `comparison`
+* `conditional`
+* `math`
+
+### Files named after operations
+
+Lots of commands have files named after operations in the ESQL language and
+contain the integration testing of the syntax and options in that operation.
+Operations will appear in many of the other files, especially `FROM`, `WHERE`,
+`LIMIT`, and `EVAL`, but only to test particular functions.
+
+* `dissect`
+* `drop`
+* `enrich`
+* `eval`
+* `grok`
+* `order`
+* `keep`
+* `limit`
+* `meta`
+* `mv_expand`
+* `rename`
+* `row`
+* `stats`
+* `topN`
+* `where`
+* `where-like`
+
+### Deprecated files
+
+When we first implemented copying snippets into the documentation, I dumped all
+the snippets into `docs.csv-spec`. This was supposed to be a temporary holding
+area until they were relocated, and we haven't had time to do that. Don't put
+more tests in there.
+
+## Embedding examples in the documentation
+
+Snippets from these tests can be embedded into the asciidoc documentation of
+ESQL using the following rather arcane snippet:
+
+```asciidoc
+[source.merge.styled,esql]
+----
+include::{esql-specs}/floats.csv-spec[tag=sin]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/floats.csv-spec[tag=sin-result]
+|===
+```
+
+<details>
+<summary>What is this asciidoc syntax?</summary>
+
+The first section is a source code block for the ES|QL query:
+
+- a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`)
+  - `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL
+- an [include directive](https://docs.asciidoctor.org/asciidoc/latest/directives/include/) to import content from another file (i.e. test files here) into the current document
+- a directory path defined as an [attribute](https://docs.asciidoctor.org/asciidoc/latest/attributes/document-attributes/) or variable, within curly braces: `{esql-specs}`
+- a [tagged region](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions) `[tag=sin]` to only include a specific section of a file
+
+The second section is the response returned as a table:
+
+- styled using `[%header.monospaced.styled,format=dsv,separator=|]`
+- delimited by `|===`
+- again using includes, attributes, and tagged regions
+</details>
    + +The example above extracts the `sin` test from the `floats` file. If you are +writing the tests for a function don't build this by hand, instead annotate +the `.java` file for the function with `@FunctionInfo` and add an `examples` +field like this: + +```java +@FunctionInfo( + returnType = "double", + description = "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.", + examples = @Example(file = "floats", tag = "sin") +) +``` + +Running the tests will generate the asciidoc files for you. See +`esql/functions/README.md` for all of the docs the tests generate. + +Either way, CSV-SPEC files must be tagged using four special comments so snippets can be +included in the docs: + +```csv-spec +sin +// tag::sin[] +ROW a=1.8 +| EVAL sin=SIN(a) +// end::sin[] +; + +// tag::sin-result[] +a:double | sin:double + 1.8 | 0.9738476308781951 +// end::sin-result[] +; +``` + +The `// tag::` and `// end::` are standard asciidoc syntax for working with [tagged regions](https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/#tagging-regions). Weird looking but +you aren't going to type it by accident! + +Finally, this'll appear in the docs as a table kind of like this: + +| a:double | sin:double | +|---------:|-------------------:| +| 1.8 | 0.9738476308781951 | + +### Skipping tests in old versions + +CSV-SPEC tests run against half-upgraded clusters in the +`x-pack:plugin:esql:qa:server:mixed-cluster` project and will fail if they test +new behavior against an old node. To stop them from running you should create +a `NodeFeature` in `EsqlFeatures` for your change. Then you can skip it by +adding a `required_feature` to your test like so: +```csv-spec +mvSlice +required_feature: esql.mv_sort + +row a = [true, false, false, true] +| eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); +``` + +That skips nodes that don't have the `esql.mv_sort` feature. From 43efc9505700217e258ad1df3b7f1e71213ccc1a Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 8 Apr 2024 15:30:10 +0200 Subject: [PATCH 082/173] Refactoring on merging InternalTerms (#107049) This refactor introduces a TermsAggregationReducer that holds the logic to merge InternalTerms. The main difference is that we are accumulating the buckets now instead of the internal aggregations. 
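The accept/get shape mentioned above is a streaming fold: each shard-level (or partially reduced) result is pushed into the reducer as it arrives, and the merged result is materialized once at the end. A stripped-down sketch of that contract, plus the priority-queue merge that `reduceMergeSort` performs on key-sorted buckets, follows. It is simplified to `String` keys and omits doc-count-error tracking and the folding of equal-key runs into a single bucket; the interface is an assumption shaped like the diff below, not the real generic signatures:

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

/** Minimal sketch of the accept/get reducer contract. */
interface Reducer<T, R> {
    void accept(T partial); // called once per partial result
    R get();                // called once at the end to produce the merged result
}

/** Streaming k-way merge of pre-sorted bucket lists, in the spirit of reduceMergeSort. */
class SortedKeyReducer implements Reducer<List<String>, List<String>> {
    private final List<List<String>> bucketsList = new ArrayList<>(); // accumulate buckets, not aggregations

    @Override
    public void accept(List<String> buckets) {
        if (buckets.isEmpty() == false) { // skip empty partials up front
            bucketsList.add(buckets);
        }
    }

    @Override
    public List<String> get() {
        record Cursor(Iterator<String> it, String current) {}
        PriorityQueue<Cursor> pq = new PriorityQueue<>(Comparator.comparing(Cursor::current));
        for (List<String> buckets : bucketsList) {
            Iterator<String> it = buckets.iterator();
            pq.add(new Cursor(it, it.next())); // lists are non-empty by construction
        }
        List<String> merged = new ArrayList<>();
        while (pq.isEmpty() == false) {
            Cursor top = pq.poll();
            // The real reducer folds runs of equal keys into one bucket here;
            // this sketch just emits them in order.
            merged.add(top.current());
            if (top.it().hasNext()) {
                pq.add(new Cursor(top.it(), top.it().next()));
            }
        }
        return merged;
    }
}
```

Accumulating per-shard bucket lists instead of whole aggregation objects, as the refactor does, also lets empty partials be dropped on arrival rather than carried into the final merge.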
--- .../bucket/terms/AbstractInternalTerms.java | 191 +++++++++--------- .../bucket/terms/DoubleTerms.java | 16 +- .../aggregations/bucket/terms/LongTerms.java | 15 +- .../bucket/terms/StringTerms.java | 16 +- .../multiterms/InternalMultiTerms.java | 113 ++++++----- 5 files changed, 181 insertions(+), 170 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index c423b2ca8cb51..11bd63bcdaa8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.DelayedBucket; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -112,23 +113,6 @@ private B reduceBucket(List buckets, AggregationReduceContext context) { return createBucket(docCount, aggs, docCountError, buckets.get(0)); } - private BucketOrder getReduceOrder(List aggregations) { - BucketOrder thisReduceOrder = null; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().size() == 0) { - continue; - } - if (thisReduceOrder == null) { - thisReduceOrder = terms.getReduceOrder(); - } else if (thisReduceOrder.equals(terms.getReduceOrder()) == false) { - return getOrder(); - } - } - return thisReduceOrder != null ? thisReduceOrder : getOrder(); - } - private long getDocCountError(A terms) { int size = terms.getBuckets().size(); if (size == 0 || size < terms.getShardSize() || isKeyOrder(terms.getOrder())) { @@ -154,47 +138,37 @@ private long getDocCountError(A terms) { * @return the order we used to reduce the buckets */ private BucketOrder reduceBuckets( - List aggregations, + List> bucketsList, + BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { - /* - * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. - * That allows to perform a merge sort when reducing multiple aggregations together. - * For backward compatibility, we disable the merge sort and use ({@link #reduceLegacy} if any of - * the provided aggregations use a different {@link #reduceOrder}. - */ - BucketOrder thisReduceOrder = getReduceOrder(aggregations); if (isKeyOrder(thisReduceOrder)) { // extract the primary sort in case this is a compound order. 
thisReduceOrder = InternalOrder.key(isKeyAsc(thisReduceOrder)); - reduceMergeSort(aggregations, thisReduceOrder, reduceContext, sink); + reduceMergeSort(bucketsList, thisReduceOrder, reduceContext, sink); } else { - reduceLegacy(aggregations, reduceContext, sink); + reduceLegacy(bucketsList, reduceContext, sink); } return thisReduceOrder; } private void reduceMergeSort( - List aggregations, + List> bucketsList, BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, Consumer> sink ) { assert isKeyOrder(thisReduceOrder); final Comparator cmp = thisReduceOrder.comparator(); - final PriorityQueue> pq = new PriorityQueue<>(aggregations.size()) { + final PriorityQueue> pq = new PriorityQueue<>(bucketsList.size()) { @Override protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { return cmp.compare(a.current(), b.current()) < 0; } }; - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - pq.add(new IteratorAndCurrent<>(terms.getBuckets().iterator())); - } + for (List buckets : bucketsList) { + pq.add(new IteratorAndCurrent<>(buckets.iterator())); } // list of buckets coming from different shards that have the same key List sameTermBuckets = new ArrayList<>(); @@ -228,19 +202,11 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } } - private void reduceLegacy( - List aggregations, - AggregationReduceContext reduceContext, - Consumer> sink - ) { - Map> bucketMap = new HashMap<>(); - for (InternalAggregation aggregation : aggregations) { - @SuppressWarnings("unchecked") - A terms = (A) aggregation; - if (terms.getBuckets().isEmpty() == false) { - for (B bucket : terms.getBuckets()) { - bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); - } + private void reduceLegacy(List> bucketsList, AggregationReduceContext reduceContext, Consumer> sink) { + final Map> bucketMap = new HashMap<>(); + for (List buckets : bucketsList) { + for (B bucket : buckets) { + bucketMap.computeIfAbsent(bucket.getKey(), k -> new ArrayList<>()).add(bucket); } } for (List sameTermBuckets : bucketMap.values()) { @@ -248,21 +214,49 @@ private void reduceLegacy( } } - public InternalAggregation doReduce(List aggregations, AggregationReduceContext reduceContext) { - long sumDocCountError = 0; - long[] otherDocCount = new long[] { 0 }; - A referenceTerms = null; - for (InternalAggregation aggregation : aggregations) { + public final AggregatorReducer termsAggregationReducer(AggregationReduceContext reduceContext, int size) { + return new TermsAggregationReducer(reduceContext, size); + } + + private class TermsAggregationReducer implements AggregatorReducer { + private final List> bucketsList; + private final AggregationReduceContext reduceContext; + + private long sumDocCountError = 0; + private final long[] otherDocCount = new long[] { 0 }; + private A referenceTerms = null; + /* + * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. + * That allows to perform a merge sort when reducing multiple aggregations together. + * For backward compatibility, we disable the merge sort and use ({@link #reduceLegacy} if any of + * the provided aggregations use a different {@link #reduceOrder}. 
+ */ + private BucketOrder thisReduceOrder = null; + + private TermsAggregationReducer(AggregationReduceContext reduceContext, int size) { + bucketsList = new ArrayList<>(size); + this.reduceContext = reduceContext; + } + + @Override + public void accept(InternalAggregation aggregation) { + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") A terms = (A) aggregation; - if (referenceTerms == null && terms.canLeadReduction()) { + if (referenceTerms == null) { referenceTerms = terms; - } - if (referenceTerms != null && referenceTerms.getClass().equals(terms.getClass()) == false && terms.canLeadReduction()) { + } else if (referenceTerms.getClass().equals(terms.getClass()) == false) { // control gets into this loop when the same field name against which the query is executed // is of different types in different indices. throw AggregationErrors.reduceTypeMismatch(referenceTerms.getName(), Optional.empty()); } + if (thisReduceOrder == null) { + thisReduceOrder = terms.getReduceOrder(); + } else if (thisReduceOrder != getOrder() && thisReduceOrder.equals(terms.getReduceOrder()) == false) { + thisReduceOrder = getOrder(); + } otherDocCount[0] += terms.getSumOfOtherDocCounts(); final long thisAggDocCountError = getDocCountError(terms); if (sumDocCountError != -1) { @@ -283,52 +277,63 @@ public InternalAggregation doReduce(List aggregations, Aggr // later in this method. bucket.updateDocCountError(-thisAggDocCountError); } + if (terms.getBuckets().isEmpty() == false) { + bucketsList.add(terms.getBuckets()); + } } - BucketOrder thisReduceOrder; - List result; - if (reduceContext.isFinalReduce()) { - TopBucketBuilder top = TopBucketBuilder.build( - getRequiredSize(), - getOrder(), - removed -> otherDocCount[0] += removed.getDocCount() - ); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (bucket.getDocCount() >= getMinDocCount()) { - top.add(bucket); - } - }); - result = top.build(); - } else { - /* - * We can prune the list on partial reduce if the aggregation is ordered - * by key and not filtered on doc count. The results come in key order - * so we can just stop iteration early. - */ - boolean canPrune = isKeyOrder(getOrder()) && getMinDocCount() == 0; - result = new ArrayList<>(); - thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - if (canPrune == false || result.size() < getRequiredSize()) { - result.add(bucket.reduced()); + @Override + public InternalAggregation get() { + BucketOrder thisReduceOrder; + List result; + if (isKeyOrder(getOrder()) && getMinDocCount() <= 1) { + /* + * the aggregation is order by key and not filtered on doc count. The results come in key order + * so we can just have an optimize collection. 
+ */ + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (result.size() < getRequiredSize()) { + result.add(bucket.reduced()); + } else { + otherDocCount[0] += bucket.getDocCount(); + } + }); + } else if (reduceContext.isFinalReduce()) { + TopBucketBuilder top = TopBucketBuilder.build( + getRequiredSize(), + getOrder(), + removed -> otherDocCount[0] += removed.getDocCount() + ); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> { + if (bucket.getDocCount() >= getMinDocCount()) { + top.add(bucket); + } + }); + result = top.build(); + } else { + result = new ArrayList<>(); + thisReduceOrder = reduceBuckets(bucketsList, getThisReduceOrder(), reduceContext, bucket -> result.add(bucket.reduced())); + } + for (B r : result) { + if (sumDocCountError == -1) { + r.setDocCountError(-1); } else { - otherDocCount[0] += bucket.getDocCount(); + r.updateDocCountError(sumDocCountError); } - }); - } - for (B r : result) { + } + long docCountError; if (sumDocCountError == -1) { - r.setDocCountError(-1); + docCountError = -1; } else { - r.updateDocCountError(sumDocCountError); + docCountError = bucketsList.size() == 1 ? 0 : sumDocCountError; } + return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } - long docCountError; - if (sumDocCountError == -1) { - docCountError = -1; - } else { - docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + + private BucketOrder getThisReduceOrder() { + return thisReduceOrder == null ? getOrder() : thisReduceOrder; } - return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java index 6710dd51a3dd7..2e40ab35b21c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -18,7 +19,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -190,21 +190,25 @@ protected DoubleTerms create(String name, List buckets, BucketOrder redu @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final List aggregations = new ArrayList<>(); + private final AggregatorReducer processor = termsAggregationReducer(reduceContext, size); @Override public void accept(InternalAggregation aggregation) { if (aggregation instanceof LongTerms longTerms) { - DoubleTerms dTerms = LongTerms.convertLongTermsToDouble(longTerms, format); - aggregations.add(dTerms); + processor.accept(LongTerms.convertLongTermsToDouble(longTerms, format)); } else { - aggregations.add(aggregation); + processor.accept(aggregation); } } @Override public 
InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + return processor.get(); + } + + @Override + public void close() { + Releasables.close(processor); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java index b0af2c3d4e618..76f33b1c0e726 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java @@ -213,8 +213,8 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont } return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); - boolean isPromotedToDouble = false; + private List aggregations = new ArrayList<>(size); + private boolean isPromotedToDouble = false; @Override public void accept(InternalAggregation aggregation) { @@ -243,7 +243,16 @@ private void promoteToDouble(List aggregations) { @Override public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java index 44bbf62c7cb19..aa3788f241079 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java @@ -14,12 +14,10 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -153,19 +151,7 @@ public StringTerms(StreamInput in) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { - return new AggregatorReducer() { - private final List aggregations = new ArrayList<>(size); - - @Override - public void accept(InternalAggregation aggregation) { - aggregations.add(aggregation); - } - - @Override - public InternalAggregation get() { - return ((AbstractInternalTerms) aggregations.get(0)).doReduce(aggregations, reduceContext); - } - }; + return termsAggregationReducer(reduceContext, size); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 3e321d57d877c..0f732d2017c74 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -439,44 +439,6 @@ public InternalMultiTerms create(List buckets) { ); } - /** - * Checks if any keys need to be promoted to double from long or unsigned_long - */ - private boolean[] needsPromotionToDouble(List aggregations) { - if (aggregations.size() < 2) { - return null; - } - boolean[] promotions = null; - - for (int i = 0; i < keyConverters.size(); i++) { - boolean hasLong = false; - boolean hasUnsignedLong = false; - boolean hasDouble = false; - boolean hasNonNumber = false; - for (InternalAggregation aggregation : aggregations) { - InternalMultiTerms agg = (InternalMultiTerms) aggregation; - KeyConverter keyConverter = agg.keyConverters.get(i); - switch (keyConverter) { - case DOUBLE -> hasDouble = true; - case LONG -> hasLong = true; - case UNSIGNED_LONG -> hasUnsignedLong = true; - default -> hasNonNumber = true; - } - } - if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { - throw AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); - } - // Promotion to double is required if at least 2 of these 3 conditions are true. - if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ? 1 : 0) > 1) { - if (promotions == null) { - promotions = new boolean[keyConverters.size()]; - } - promotions[i] = true; - } - } - return promotions; - } - private InternalAggregation promoteToDouble(InternalAggregation aggregation, boolean[] needsPromotion) { InternalMultiTerms multiTerms = (InternalMultiTerms) aggregation; List multiTermsBuckets = multiTerms.getBuckets(); @@ -539,33 +501,78 @@ private InternalAggregation promoteToDouble(InternalAggregation aggregation, boo ); } - public List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { - if (needsPromotionToDouble != null) { - List newAggs = new ArrayList<>(aggregations.size()); - for (InternalAggregation agg : aggregations) { - newAggs.add(promoteToDouble(agg, needsPromotionToDouble)); - } - return newAggs; - } else { - return aggregations; - } - } - @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final List aggregations = new ArrayList<>(size); + private List aggregations = new ArrayList<>(size); @Override public void accept(InternalAggregation aggregation) { aggregations.add(aggregation); } + private List getProcessedAggs(List aggregations, boolean[] needsPromotionToDouble) { + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + return aggregations; + } + + /** + * Checks if any keys need to be promoted to double from long or unsigned_long + */ + private boolean[] needsPromotionToDouble(List aggregations) { + if (aggregations.size() < 2) { + return null; + } + boolean[] promotions = null; + + for (int i = 0; i < keyConverters.size(); i++) { + boolean hasLong = false; + boolean hasUnsignedLong = false; + boolean hasDouble = false; + boolean hasNonNumber = false; + for (InternalAggregation aggregation : aggregations) { + InternalMultiTerms agg = (InternalMultiTerms) aggregation; + KeyConverter keyConverter = agg.keyConverters.get(i); + switch (keyConverter) { + case DOUBLE -> hasDouble = true; + case LONG -> hasLong = true; + case UNSIGNED_LONG -> hasUnsignedLong = true; + default -> hasNonNumber = true; + } + } + if (hasNonNumber && (hasDouble || hasUnsignedLong || hasLong)) { + throw 
AggregationErrors.reduceTypeMismatch(name, Optional.of(i + 1)); + } + // Promotion to double is required if at least 2 of these 3 conditions are true. + if ((hasDouble ? 1 : 0) + (hasUnsignedLong ? 1 : 0) + (hasLong ? 1 : 0) > 1) { + if (promotions == null) { + promotions = new boolean[keyConverters.size()]; + } + promotions[i] = true; + } + } + return promotions; + } + @Override public InternalAggregation get() { - List processed = getProcessedAggs(aggregations, needsPromotionToDouble(aggregations)); - return ((AbstractInternalTerms) processed.get(0)).doReduce(processed, reduceContext); + final boolean[] needsPromotionToDouble = needsPromotionToDouble(aggregations); + if (needsPromotionToDouble != null) { + aggregations.replaceAll(agg -> promoteToDouble(agg, needsPromotionToDouble)); + } + try ( + AggregatorReducer processor = ((AbstractInternalTerms) aggregations.get(0)).termsAggregationReducer( + reduceContext, + size + ) + ) { + aggregations.forEach(processor::accept); + aggregations = null; // release memory + return processor.get(); + } } }; } From ba983cf22c5970a940d2bf988e6052a121d22af9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 8 Apr 2024 15:49:25 +0100 Subject: [PATCH 083/173] Fix `TransportTasksActionTests#testFailedTasksCount` (#107190) Prior to #106733 when the `TestNodesAction` threw an exception it would immediately unregister the task: https://github.com/elastic/elasticsearch/blob/d39d1e2c249f49b8170d4f50329934d871b2b382/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java#L78 However with that change the exception is caught and passed to the `TaskTransportChannel`, so unregistration happens after sending the response and may therefore not be recorded by the time the test makes its assertion. This commit fixes the test with a busy-wait. Closes #107043 --- .../node/tasks/TransportTasksActionTests.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 9ddcf8a596226..67cba13661e34 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -563,7 +563,6 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { responseLatch.await(10, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107043") public void testFailedTasksCount() throws Exception { Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build(); setupTestNodes(settings); @@ -605,14 +604,14 @@ protected NodeResponse nodeOperation(NodeRequest request, Task task) { // Make sure that actions are still registered in the task manager on all nodes // Twice on the coordinating node and once on all other nodes. 
- assertEquals(4, listeners[0].getEvents().size()); - assertEquals(2, listeners[0].getRegistrationEvents().size()); - assertEquals(2, listeners[0].getUnregistrationEvents().size()); - for (int i = 1; i < listeners.length; i++) { - assertEquals(2, listeners[i].getEvents().size()); - assertEquals(1, listeners[i].getRegistrationEvents().size()); - assertEquals(1, listeners[i].getUnregistrationEvents().size()); - } + assertBusy(() -> { + assertEquals(2, listeners[0].getRegistrationEvents().size()); + assertEquals(2, listeners[0].getUnregistrationEvents().size()); + for (int i = 1; i < listeners.length; i++) { + assertEquals(1, listeners[i].getRegistrationEvents().size()); + assertEquals(1, listeners[i].getUnregistrationEvents().size()); + } + }); } private List getAllTaskDescriptions() { From 9d62deb709b1f4932dd92ae6999902af939daeb1 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Mon, 8 Apr 2024 17:15:10 +0200 Subject: [PATCH 084/173] ES|QL: Make more tests deterministic (#107217) Fixes https://github.com/elastic/elasticsearch/issues/105540 Fixes https://github.com/elastic/elasticsearch/issues/103866 Making a few more ES|QL CSV tests deterministic, especially those that involve `AVG()` (mostly failures like `expected:<27517.27973714994[7]> but was:<27517.27973714994[4]>` due to double precision in distributed execution) --- .../esql/qa/testFixtures/src/main/resources/keep.csv-spec | 6 +++--- .../esql/qa/testFixtures/src/main/resources/stats.csv-spec | 7 ++++--- .../qa/testFixtures/src/main/resources/string.csv-spec | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec index facf06eb6a960..14a3807b8729c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/keep.csv-spec @@ -280,10 +280,10 @@ avg_salary:double | x:double ; averageOfEvalValue -from employees | eval ratio = salary / height | stats avg(ratio); +from employees | eval ratio = salary / height | stats avg = avg(ratio) | eval avg = round(avg, 8); -avg(ratio):double -27517.279737149947 +avg:double +27517.27973715 ; simpleWhere diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 6ccaf1eb0b6e7..fb2d46baf27ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -137,6 +137,7 @@ avgOfDouble FROM employees | STATS AVG(height) // end::avg[] +| EVAL `AVG(height)` = ROUND(`AVG(height)`, 5) ; // tag::avg-result[] @@ -159,7 +160,7 @@ h:double 1.76818359375 ; avgOfScaledFloat -from employees | stats h = avg(height.scaled_float); +from employees | stats h = avg(height.scaled_float) | eval h = round(h, 4); h:double 1.7682 @@ -1025,13 +1026,13 @@ c:long | cd:long docsStatsAvgNestedExpression#[skip:-8.12.99,reason:supported in 8.13+] // tag::docsStatsAvgNestedExpression[] FROM employees -| STATS avg_salary_change = AVG(MV_AVG(salary_change)) +| STATS avg_salary_change = ROUND(AVG(MV_AVG(salary_change)), 10) // end::docsStatsAvgNestedExpression[] ; // tag::docsStatsAvgNestedExpression-result[] avg_salary_change:double -1.3904535864978902 +1.3904535865 // end::docsStatsAvgNestedExpression-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 69638ef459805..5a81a05cee143 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -799,7 +799,7 @@ emp_no:integer | full_name:keyword | full_name_2:keyword | job_positions:keyword ; showTextFields -from hosts | where host == "beta" | keep host, host_group, description; +from hosts | sort description, card, ip0, ip1 | where host == "beta" | keep host, host_group, description; ignoreOrder:true host:keyword | host_group:text | description:text From ef7e50c97df05d9e013c388bc1dc88c5d5b3cf28 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 8 Apr 2024 17:23:49 +0200 Subject: [PATCH 085/173] Fix native initialization for ESTestCase (#107203) This tweaks the IDEA-specific setup for multi-Java-version projects so that version-specific source sets are only ignored while IntelliJ is importing and syncing the project. We keep those source sets out of the IDEA import by default, but they are still picked up when (Gradle) test execution is triggered from the IDE (where idea.active = true). --- .../java/org/elasticsearch/gradle/internal/MrjarPlugin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index c64bd3cc9c068..16c286bfdd3f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -62,13 +62,13 @@ public class MrjarPlugin implements Plugin { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - var isIdea = System.getProperty("idea.active", "false").equals("true"); + var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true"); var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this.
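(An illustrative aside, not part of the patch.) The switch above from `idea.active` to `idea.sync.active` hinges on a behavioural difference between the two properties: IntelliJ sets `idea.active` for every Gradle invocation it starts, including test runs, while `idea.sync.active` is set only while the project is being imported or synced. A trivial sketch of the check; the helper name is hypothetical, not from the codebase:

    // true only during an IntelliJ project import/sync, not during a
    // test run that IntelliJ launches through Gradle
    static boolean ideaSyncInProgress() {
        return Boolean.parseBoolean(System.getProperty("idea.sync.active", "false"));
    }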
// Avoids an IntelliJ bug: // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea - if (isIdea == false || ideaSourceSetsEnabled) { + if (isIdeaSync == false || ideaSourceSetsEnabled) { List mainVersions = findSourceVersions(project); List mainSourceSets = new ArrayList<>(); mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); From 00aee781a2dfcf5f8fdbcb1419ed4747b88cbf36 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 8 Apr 2024 11:45:22 -0400 Subject: [PATCH 086/173] Forward port release notes for v8.13.1 (#107002) --- .../reference/migration/migrate_8_13.asciidoc | 17 ++++--- docs/reference/release-notes.asciidoc | 2 + docs/reference/release-notes/8.13.1.asciidoc | 33 +++++++++++++ .../release-notes/highlights.asciidoc | 48 +++++-------------- 4 files changed, 56 insertions(+), 44 deletions(-) create mode 100644 docs/reference/release-notes/8.13.1.asciidoc diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c9e726d940b1d..dca10671e57bc 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,14 +16,17 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -The following changes in {es} 8.13 might affect your applications -and prevent them from operating normally. -Before upgrading to 8.13, review these changes and take the described steps -to mitigate the impact. +There are no breaking changes in 8.13. - -There are no notable breaking changes in {es} 8.13. -But there are some less critical breaking changes. +[discrete] +[[migrate-notable-changes-8.13]] +=== Notable changes +The following are notable, non-breaking updates to be aware of: + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs. [discrete] [[breaking_813_index_setting_changes]] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index e3c8da281f2a1..f9da92aef925e 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -63,6 +64,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] include::release-notes/8.12.1.asciidoc[] diff --git a/docs/reference/release-notes/8.13.1.asciidoc b/docs/reference/release-notes/8.13.1.asciidoc new file mode 100644 index 0000000000000..9f5f34d27eb79 --- /dev/null +++ b/docs/reference/release-notes/8.13.1.asciidoc @@ -0,0 +1,33 @@ +[[release-notes-8.13.1]] +== {es} version 8.13.1 + +Also see <>. 
+ +[[bug-8.13.1]] +[float] +=== Bug fixes + +Aggregations:: +* Add test to exercise reduction of terms aggregation order by key {es-pull}106799[#106799] + +Downsampling:: +* Gate reading of optional string array for bwc {es-pull}106878[#106878] + +Machine Learning:: +* Fix Array out of bounds exception in the XLM Roberta tokenizer {es-pull}106655[#106655] + +Search:: +* Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` {es-pull}106678[#106678] (issue: {es-issue}105911[#105911]) +* Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set {es-pull}106564[#106564] + +Transform:: +* Fail checkpoint on missing clusters {es-pull}106793[#106793] (issues: {es-issue}104533[#104533], {es-issue}106790[#106790]) + +[[enhancement-8.13.1]] +[float] +=== Enhancements + +Transform:: +* Raise loglevel of events related to transform lifecycle from DEBUG to INFO {es-pull}106602[#106602] + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 25096779521e4..8d9d743a239f5 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -31,46 +31,20 @@ endif::[] // tag::notable-highlights[] [discrete] -[[improve_storage_efficiency_for_non_metric_fields_in_tsdb]] -=== Improve storage efficiency for non-metric fields in TSDB -Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. -While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, -there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). -In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. -This means that for each time series, we are storing long consecutive runs of the same ordinal. -With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), -and runs of cycling values (such as `1 2 1 2 …`). -In our testing, we have seen a reduction in storage size by about 13%. -The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. -The more non-metric fields, the more effective this improvement will be. +[[add_global_retention_in_data_stream_lifecycle]] +=== Add global retention in data stream lifecycle +Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention +allows us to configure two different retentions: -{es-pull}99747[#99747] +- `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention +defined on the data stream level. +- `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream +data to be deleted after the `max_retention` has passed. -[discrete] -[[ga_release_of_synonyms_api]] -=== GA Release of Synonyms API -Removes the beta label for the Synonyms API to make it GA. - -{es-pull}103223[#103223] - -[discrete] -[[flag_in_field_caps_to_return_only_fields_with_values_in_index]] -=== Flag in `_field_caps` to return only fields with values in index -We added support for filtering the field capabilities API output by removing -fields that don't have a value. 
This can be done through the newly added -`include_empty_fields` parameter, which defaults to true. - -{es-pull}103651[#103651] - -[discrete] -[[new_lucene_9_10_release]] -=== New Lucene 9.10 release -- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. -- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search -- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. -- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. +Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data +stream considering all the available retention configurations. -{es-pull}105578[#105578] +{es-pull}105682[#105682] // end::notable-highlights[] From ab52ef1f06af1b08f5808ad165f062dc0dbdff7e Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Mon, 8 Apr 2024 17:55:41 +0200 Subject: [PATCH 087/173] Fix merging component templates with a mix of dotted and nested object mapper definitions (#106077) Co-authored-by: Andrei Dan --- docs/changelog/106077.yaml | 7 ++ .../RankFeatureMetaFieldMapperTests.java | 6 +- .../PercolatorFieldMapperTests.java | 13 +-- .../metadata/MetadataMappingService.java | 16 +--- .../index/mapper/DocumentParserContext.java | 10 ++- .../index/mapper/MapperBuilderContext.java | 25 +++++- .../index/mapper/MapperMergeContext.java | 13 ++- .../index/mapper/MapperService.java | 21 ++--- .../elasticsearch/index/mapper/Mapping.java | 8 +- .../index/mapper/MappingParser.java | 12 ++- .../index/mapper/NestedObjectMapper.java | 20 ++--- .../index/mapper/ObjectMapper.java | 41 +++------ .../index/mapper/PassThroughObjectMapper.java | 10 ++- .../index/mapper/RootObjectMapper.java | 10 +-- .../index/mapper/DocumentMapperTests.java | 6 +- .../mapper/MapperBuilderContextTests.java | 30 +++++++ .../index/mapper/MapperMergeContextTests.java | 6 ++ .../index/mapper/MapperServiceTests.java | 87 +++++++++++++++++++ .../index/mapper/NestedObjectMapperTests.java | 3 +- .../index/mapper/ObjectMapperMergeTests.java | 16 +--- .../index/mapper/ObjectMapperTests.java | 23 +++-- .../index/similarity/SimilarityTests.java | 3 +- .../index/mapper/MetadataMapperTestCase.java | 9 +- 23 files changed, 276 insertions(+), 119 deletions(-) create mode 100644 docs/changelog/106077.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java diff --git a/docs/changelog/106077.yaml b/docs/changelog/106077.yaml new file mode 100644 index 0000000000000..eb987cd9617f8 --- /dev/null +++ b/docs/changelog/106077.yaml @@ -0,0 +1,7 @@ +pr: 106077 +summary: Fix merging component templates with a mix of dotted and nested object mapper + definitions +area: Mapping +type: bug +issues: + - 105482 diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java index b9ca544e7532d..9f559c8f55858 100644 --- 
a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapperTests.java @@ -49,7 +49,11 @@ public void testBasics() throws Exception { .endObject() ); - Mapping parsedMapping = createMapperService(mapping).parseMapping("type", new CompressedXContent(mapping)); + Mapping parsedMapping = createMapperService(mapping).parseMapping( + "type", + MapperService.MergeReason.MAPPING_UPDATE, + new CompressedXContent(mapping) + ); assertEquals(mapping, parsedMapping.toCompressedXContent().toString()); assertNotNull(parsedMapping.getMetadataMapperByClass(RankFeatureMetaFieldMapper.class)); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 46b9e365fd0ea..4adc7f9b5ba27 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.TestDocumentParserContext; @@ -206,7 +207,7 @@ public void init() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(mapper), MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { @@ -223,7 +224,7 @@ private void addQueryFieldMappings() throws Exception { .endObject() .endObject() ); - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -699,7 +700,7 @@ public void testAllowNoAdditionalSettings() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, () -> indexServiceWithoutSettings.mapperService() - .merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + .merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } @@ -722,7 +723,7 @@ public void testMultiplePercolatorFields() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -763,7 +764,7 @@ public void testNestedPercolatorField() throws Exception { .endObject() .endObject() ); - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), 
MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper() @@ -912,7 +913,7 @@ public void testEmptyName() throws Exception { ); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("type1", new CompressedXContent(mapping)) + () -> mapperService.parseMapping("type1", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("field name cannot be an empty string")); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 7e2c0849a6fad..3ca206eaddb28 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -133,6 +133,7 @@ private static ClusterState applyRequest( final CompressedXContent mappingUpdateSource = request.source(); final Metadata metadata = currentState.metadata(); final List updateList = new ArrayList<>(); + MergeReason reason = request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE; for (Index index : request.indices()) { MapperService mapperService = indexMapperServices.get(index); // IMPORTANT: always get the metadata from the state since it get's batched @@ -147,13 +148,8 @@ private static ClusterState applyRequest( updateList.add(indexMetadata); // try and parse it (no need to add it here) so we can bail early in case of parsing exception // first, simulate: just call merge and ignore the result - Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource); - MapperService.mergeMappings( - mapperService.documentMapper(), - mapping, - request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE, - mapperService.getIndexSettings() - ); + Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, reason, mappingUpdateSource); + MapperService.mergeMappings(mapperService.documentMapper(), mapping, reason, mapperService.getIndexSettings()); } Metadata.Builder builder = Metadata.builder(metadata); boolean updated = false; @@ -169,11 +165,7 @@ private static ClusterState applyRequest( if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = mapperService.merge( - MapperService.SINGLE_MAPPING_NAME, - mappingUpdateSource, - request.autoUpdate() ? 
MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE - ); + DocumentMapper mergedMapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource, reason); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 92aa8662eaf9d..a42477bed2146 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.FlatteningXContentParser; import org.elasticsearch.xcontent.XContentParser; @@ -618,7 +619,14 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() { if (objectMapper instanceof PassThroughObjectMapper passThroughObjectMapper) { containsDimensions = passThroughObjectMapper.containsDimensions(); } - return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic(), false, containsDimensions, dynamic); + return new MapperBuilderContext( + p, + mappingLookup().isSourceSynthetic(), + false, + containsDimensions, + dynamic, + MergeReason.MAPPING_UPDATE + ); } public abstract XContentParser parser(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index bbfb9298c23ca..15caa7f5a6238 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import java.util.Objects; @@ -22,7 +23,11 @@ public class MapperBuilderContext { * The root context, to be used when building a tree of mappers */ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream) { - return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC); + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE); + } + + public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason) { + return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason); } private final String path; @@ -30,9 +35,10 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat private final boolean isDataStream; private final boolean parentObjectContainsDimensions; private final ObjectMapper.Dynamic dynamic; + private final MergeReason mergeReason; MapperBuilderContext(String path) { - this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC); + this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC, MergeReason.MAPPING_UPDATE); } MapperBuilderContext( @@ -40,7 +46,8 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat boolean isSourceSynthetic, boolean isDataStream, boolean parentObjectContainsDimensions, - 
ObjectMapper.Dynamic dynamic + ObjectMapper.Dynamic dynamic, + MergeReason mergeReason ) { Objects.requireNonNull(dynamic, "dynamic must not be null"); this.path = path; @@ -48,6 +55,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat this.isDataStream = isDataStream; this.parentObjectContainsDimensions = parentObjectContainsDimensions; this.dynamic = dynamic; + this.mergeReason = mergeReason; } /** @@ -79,7 +87,8 @@ public MapperBuilderContext createChildContext( this.isSourceSynthetic, this.isDataStream, parentObjectContainsDimensions, - getDynamic(dynamic) + getDynamic(dynamic), + this.mergeReason ); } @@ -121,4 +130,12 @@ public boolean parentObjectContainsDimensions() { public ObjectMapper.Dynamic getDynamic() { return dynamic; } + + /** + * The merge reason to use when merging mappers while building the mapper. + * See also {@link ObjectMapper.Builder#buildMappers(MapperBuilderContext)}. + */ + public MergeReason getMergeReason() { + return mergeReason; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java index 8f8854ad47c7d..1e3f69baf86dd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.index.mapper.MapperService.MergeReason; + /** * Holds context used when merging mappings. * As the merge process also involves building merged {@link Mapper.Builder}s, @@ -23,11 +25,18 @@ private MapperMergeContext(MapperBuilderContext mapperBuilderContext, NewFieldsB this.newFieldsBudget = newFieldsBudget; } + static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { + return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE, newFieldsBudget); + } + /** * The root context, to be used when merging a tree of mappers */ - public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) { - return new MapperMergeContext(MapperBuilderContext.root(isSourceSynthetic, isDataStream), NewFieldsBudget.of(newFieldsBudget)); + public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason, long newFieldsBudget) { + return new MapperMergeContext( + MapperBuilderContext.root(isSourceSynthetic, isDataStream, mergeReason), + NewFieldsBudget.of(newFieldsBudget) + ); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 4646936b8891f..f91c4f176c6da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -310,7 +310,7 @@ public void updateMapping(final IndexMetadata currentIndexMetadata, final IndexM if (newMappingMetadata != null) { String type = newMappingMetadata.type(); CompressedXContent incomingMappingSource = newMappingMetadata.source(); - Mapping incomingMapping = parseMapping(type, incomingMappingSource); + Mapping incomingMapping = parseMapping(type, MergeReason.MAPPING_UPDATE, incomingMappingSource); DocumentMapper previousMapper; synchronized (this) { previousMapper = this.mapper; @@ -366,7 +366,7 @@ boolean assertNoUpdateRequired(final IndexMetadata newIndexMetadata) { // that the 
incoming mappings are the same as the current ones: we need to // parse the incoming mappings into a DocumentMapper and check that its // serialization is the same as the existing mapper - Mapping newMapping = parseMapping(mapping.type(), mapping.source()); + Mapping newMapping = parseMapping(mapping.type(), MergeReason.MAPPING_UPDATE, mapping.source()); final CompressedXContent currentSource = this.mapper.mappingSource(); final CompressedXContent newSource = newMapping.toCompressedXContent(); if (Objects.equals(currentSource, newSource) == false @@ -533,7 +533,7 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge } private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map mappingSourceAsMap) { - Mapping incomingMapping = parseMapping(type, mappingSourceAsMap); + Mapping incomingMapping = parseMapping(type, reason, mappingSourceAsMap); Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason, this.indexSettings); // TODO: In many cases the source here is equal to mappingSource so we need not serialize again. // We should identify these cases reliably and save expensive serialization here @@ -542,7 +542,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map return newMapper; } this.mapper = newMapper; - assert assertSerialization(newMapper); + assert assertSerialization(newMapper, reason); return newMapper; } @@ -552,9 +552,9 @@ private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, Co return newMapper; } - public Mapping parseMapping(String mappingType, CompressedXContent mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, CompressedXContent mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -564,12 +564,13 @@ public Mapping parseMapping(String mappingType, CompressedXContent mappingSource * A method to parse mapping from a source in a map form. * * @param mappingType the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping */ - public Mapping parseMapping(String mappingType, Map mappingSource) { + public Mapping parseMapping(String mappingType, MergeReason reason, Map mappingSource) { try { - return mappingParser.parse(mappingType, mappingSource); + return mappingParser.parse(mappingType, reason, mappingSource); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage()); } @@ -619,10 +620,10 @@ static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMappi return newMapping; } - private boolean assertSerialization(DocumentMapper mapper) { + private boolean assertSerialization(DocumentMapper mapper, MergeReason reason) { // capture the source now, it may change due to concurrent parsing final CompressedXContent mappingSource = mapper.mappingSource(); - Mapping newMapping = parseMapping(mapper.type(), mappingSource); + Mapping newMapping = parseMapping(mapper.type(), reason, mappingSource); if (newMapping.toCompressedXContent().equals(mappingSource) == false) { throw new AssertionError( "Mapping serialization result is different from source. 
\n--> Source [" diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index 903e4e5da5b29..b5de3971fa091 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -137,8 +137,8 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { * @return the resulting merged mapping. */ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, newFieldsBudget); - RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, mergeContext); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, reason, newFieldsBudget); + RootObjectMapper mergedRoot = root.merge(mergeWith.root, mergeContext); // When merging metadata fields as part of applying an index template, new field definitions // completely overwrite existing ones instead of being merged. This behavior matches how we @@ -176,11 +176,11 @@ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) { * @param fieldsBudget the maximum number of fields this mapping may have */ public Mapping withFieldsBudget(long fieldsBudget) { - MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, fieldsBudget); + MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, MergeReason.MAPPING_RECOVERY, fieldsBudget); // get a copy of the root mapper, without any fields RootObjectMapper shallowRoot = root.withoutMappers(); // calling merge on the shallow root to ensure we're only adding as many fields as allowed by the fields budget - return new Mapping(shallowRoot.merge(root, MergeReason.MAPPING_RECOVERY, mergeContext), metadataMappers, meta); + return new Mapping(shallowRoot.merge(root, mergeContext), metadataMappers, meta); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index 8b30915ca4d3c..86d8c1686858c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentType; import java.util.Collections; @@ -79,20 +80,25 @@ static Map convertToMap(CompressedXContent source) { } Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException { + return parse(type, MergeReason.MAPPING_UPDATE, source); + } + + Mapping parse(@Nullable String type, MergeReason reason, CompressedXContent source) throws MapperParsingException { Map mapping = convertToMap(source); - return parse(type, mapping); + return parse(type, reason, mapping); } /** * A method to parse mapping from a source in a map form. 
* * @param type the mapping type + * @param reason the merge reason to use when merging mappers while building the mapper * @param mappingSource mapping source already converted to a map form, but not yet processed otherwise * @return a parsed mapping * @throws MapperParsingException in case of parsing error */ @SuppressWarnings("unchecked") - Mapping parse(@Nullable String type, Map mappingSource) throws MapperParsingException { + Mapping parse(@Nullable String type, MergeReason reason, Map mappingSource) throws MapperParsingException { if (mappingSource.isEmpty()) { if (type == null) { throw new MapperParsingException("malformed mapping, no type name found"); @@ -178,7 +184,7 @@ Mapping parse(@Nullable String type, Map mappingSource) throws M } return new Mapping( - rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream)), + rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream, reason)), metadataMappers.values().toArray(new MetadataFieldMapper[0]), meta ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index f07d69d86f36c..5c2880a4bf760 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -65,7 +65,8 @@ public NestedObjectMapper build(MapperBuilderContext context) { NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext( context.buildFullName(name()), parentIncludedInRoot, - context.getDynamic(dynamic) + context.getDynamic(dynamic), + context.getMergeReason() ); final String fullPath = context.buildFullName(name()); final String nestedTypePath; @@ -121,14 +122,14 @@ private static class NestedMapperBuilderContext extends MapperBuilderContext { final boolean parentIncludedInRoot; - NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic) { - super(path, false, false, false, dynamic); + NestedMapperBuilderContext(String path, boolean parentIncludedInRoot, Dynamic dynamic, MapperService.MergeReason mergeReason) { + super(path, false, false, false, dynamic, mergeReason); this.parentIncludedInRoot = parentIncludedInRoot; } @Override public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { - return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic)); + return new NestedMapperBuilderContext(buildFullName(name), parentIncludedInRoot, getDynamic(dynamic), getMergeReason()); } } @@ -226,16 +227,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if ((mergeWith instanceof NestedObjectMapper) == false) { MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; - return merge(mergeWithObject, reason, parentMergeContext); - } - ObjectMapper merge(NestedObjectMapper mergeWithObject, MapperService.MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + final MapperService.MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); + var mergeResult = 
MergeResult.build(this, mergeWithObject, parentMergeContext); Explicit incInParent = this.includeInParent; Explicit incInRoot = this.includeInRoot; if (reason == MapperService.MergeReason.INDEX_TEMPLATE) { @@ -287,7 +286,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo new NestedMapperBuilderContext( mapperBuilderContext.buildFullName(name), parentIncludedInRoot, - mapperBuilderContext.getDynamic(dynamic) + mapperBuilderContext.getDynamic(dynamic), + mapperBuilderContext.getMergeReason() ) ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 33e736ff122a1..ba396e9a72d30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -454,11 +454,6 @@ public final boolean subobjects() { return subobjects.value(); } - @Override - public ObjectMapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) { - return merge(mergeWith, MergeReason.MAPPING_UPDATE, mapperMergeContext); - } - @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -470,7 +465,8 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo return mapperMergeContext.createChildContext(name, dynamic); } - public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + @Override + public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof ObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } @@ -478,11 +474,7 @@ public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeConte // TODO stop NestedObjectMapper extending ObjectMapper? 
MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } - return merge((ObjectMapper) mergeWith, reason, parentMergeContext); - } - - ObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { - var mergeResult = MergeResult.build(this, mergeWith, reason, parentMergeContext); + var mergeResult = MergeResult.build(this, (ObjectMapper) mergeWith, parentMergeContext); return new ObjectMapper( simpleName(), fullPath, @@ -499,13 +491,9 @@ protected record MergeResult( ObjectMapper.Dynamic dynamic, Map mappers ) { - static MergeResult build( - ObjectMapper existing, - ObjectMapper mergeWithObject, - MergeReason reason, - MapperMergeContext parentMergeContext - ) { + static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, MapperMergeContext parentMergeContext) { final Explicit enabled; + final MergeReason reason = parentMergeContext.getMapperBuilderContext().getMergeReason(); if (mergeWithObject.enabled.explicit()) { if (reason == MergeReason.INDEX_TEMPLATE) { enabled = mergeWithObject.enabled; @@ -532,13 +520,7 @@ static MergeResult build( subObjects = existing.subobjects; } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); - Map mergedMappers = buildMergedMappers( - existing, - mergeWithObject, - reason, - objectMergeContext, - subObjects.value() - ); + Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, @@ -550,7 +532,6 @@ static MergeResult build( private static Map buildMergedMappers( ObjectMapper existing, ObjectMapper mergeWithObject, - MergeReason reason, MapperMergeContext objectMergeContext, boolean subobjects ) { @@ -576,11 +557,11 @@ private static Map buildMergedMappers( } else if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.getTotalFieldsCount())) { putMergedMapper(mergedMappers, mergeWithMapper); } else if (mergeWithMapper instanceof ObjectMapper om) { - putMergedMapper(mergedMappers, truncateObjectMapper(reason, objectMergeContext, om)); + putMergedMapper(mergedMappers, truncateObjectMapper(objectMergeContext, om)); } } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { assert subobjects : "existing object mappers are supposed to be flattened if subobjects is false"; - putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, reason, objectMergeContext)); + putMergedMapper(mergedMappers, objectMapper.merge(mergeWithMapper, objectMergeContext)); } else { assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; if (mergeWithMapper instanceof NestedObjectMapper) { @@ -591,7 +572,7 @@ private static Map buildMergedMappers( // If we're merging template mappings when creating an index, then a field definition always // replaces an existing one. 
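(An illustrative aside, not part of the patch.) This hunk is part of the broader refactoring in this commit: instead of threading a `MergeReason` argument through every `merge(...)` call, the reason is fixed once in the root builder context and inherited by every child context, so nested merge decisions can read it locally. A simplified, self-contained sketch of that pattern; all names below are stand-ins, not the real Elasticsearch classes:

    class MergeReasonSketch {
        enum Reason { MAPPING_UPDATE, INDEX_TEMPLATE }

        record MergeCtx(String path, Reason reason) {
            static MergeCtx root(Reason reason) {
                return new MergeCtx("", reason);
            }
            MergeCtx child(String name) {
                // the path grows per field; the merge reason is inherited unchanged
                return new MergeCtx(path.isEmpty() ? name : path + "." + name, reason);
            }
        }

        static String mergeLeaf(String existing, String incoming, MergeCtx ctx) {
            // under INDEX_TEMPLATE an incoming field definition replaces the existing one
            // wholesale; a regular MAPPING_UPDATE merges the two definitions (elided here)
            return ctx.reason() == Reason.INDEX_TEMPLATE ? incoming : existing;
        }
    }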
- if (reason == MergeReason.INDEX_TEMPLATE) { + if (objectMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { putMergedMapper(mergedMappers, mergeWithMapper); } else { putMergedMapper(mergedMappers, mergeIntoMapper.merge(mergeWithMapper, objectMergeContext)); @@ -607,13 +588,13 @@ private static void putMergedMapper(Map mergedMappers, @Nullable } } - private static ObjectMapper truncateObjectMapper(MergeReason reason, MapperMergeContext context, ObjectMapper objectMapper) { + private static ObjectMapper truncateObjectMapper(MapperMergeContext context, ObjectMapper objectMapper) { // there's not enough capacity for the whole object mapper, // so we're just trying to add the shallow object, without it's sub-fields ObjectMapper shallowObjectMapper = objectMapper.withoutMappers(); if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.getTotalFieldsCount())) { // now trying to add the sub-fields one by one via a merge, until we hit the limit - return shallowObjectMapper.merge(objectMapper, reason, context); + return shallowObjectMapper.merge(objectMapper, context); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 16b4d0b49917f..d44f03d72e211 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -100,9 +99,14 @@ public PassThroughObjectMapper.Builder newBuilder(IndexVersion indexVersionCreat return builder; } - public PassThroughObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, MapperMergeContext parentBuilderContext) { - final var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + @Override + public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) { + if (mergeWith instanceof PassThroughObjectMapper == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); + } + PassThroughObjectMapper mergeWithObject = (PassThroughObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentBuilderContext); final Explicit containsDimensions = (mergeWithObject.timeSeriesDimensionSubFields.explicit()) ? 
mergeWithObject.timeSeriesDimensionSubFields diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 90d9c879c57e1..8db3a970e31c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -345,15 +345,13 @@ protected MapperMergeContext createChildContext(MapperMergeContext mapperMergeCo } @Override - public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperMergeContext parentMergeContext) { + public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContext) { if (mergeWith instanceof RootObjectMapper == false) { MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } - return merge((RootObjectMapper) mergeWith, reason, parentMergeContext); - } - RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, MapperMergeContext parentMergeContext) { - final var mergeResult = MergeResult.build(this, mergeWithObject, reason, parentMergeContext); + RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; + final var mergeResult = MergeResult.build(this, mergeWithObject, parentMergeContext); final Explicit numericDetection; if (mergeWithObject.numericDetection.explicit()) { numericDetection = mergeWithObject.numericDetection; @@ -377,7 +375,7 @@ RootObjectMapper merge(RootObjectMapper mergeWithObject, MergeReason reason, Map final Explicit dynamicTemplates; if (mergeWithObject.dynamicTemplates.explicit()) { - if (reason == MergeReason.INDEX_TEMPLATE) { + if (parentMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE) { Map templatesByKey = new LinkedHashMap<>(); for (DynamicTemplate template : this.dynamicTemplates.value()) { templatesByKey.put(template.name(), template); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index 144bfa3e8701e..486b33d9b155a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -464,7 +464,11 @@ public void testDeeplyNestedMapping() throws Exception { threads[threadId] = new Thread(() -> { try { latch.await(); - mapperService.parseMapping("_doc", new CompressedXContent(Strings.toString(builders[threadId]))); + mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(Strings.toString(builders[threadId])) + ); } catch (Exception e) { throw new AssertionError(e); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java new file mode 100644 index 0000000000000..8c9197b0f3173 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.test.ESTestCase; + +public class MapperBuilderContextTests extends ESTestCase { + + public void testRoot() { + MapperBuilderContext root = MapperBuilderContext.root(false, false); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(MapperService.MergeReason.MAPPING_UPDATE, root.getMergeReason()); + } + + public void testRootWithMergeReason() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperBuilderContext root = MapperBuilderContext.root(false, false, mergeReason); + assertFalse(root.isSourceSynthetic()); + assertFalse(root.isDataStream()); + assertEquals(mergeReason, root.getMergeReason()); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java index 9c38487dbdf7b..77d3259ea1091 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeContextTests.java @@ -29,4 +29,10 @@ public void testAddFieldIfPossibleUnlimited() { assertTrue(context.decrementFieldBudgetIfPossible(Integer.MAX_VALUE)); } + public void testMergeReasons() { + MapperService.MergeReason mergeReason = randomFrom(MapperService.MergeReason.values()); + MapperMergeContext context = MapperMergeContext.root(false, false, mergeReason, Integer.MAX_VALUE); + assertEquals(mergeReason, context.getMapperBuilderContext().getMergeReason()); + } + } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 7f762bbfa7234..0a49907b25567 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -1707,6 +1707,93 @@ public void testExpandDottedNotationToObjectMappers() throws IOException { }"""); } + public void testMergeDottedAndNestedNotation() throws IOException { + CompressedXContent mapping1 = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + } + } + }"""); + + CompressedXContent mapping2 = new CompressedXContent(""" + { + "properties": { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping1, mapping2), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping2, mapping1), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "keyword" + } + } + } + } + } + }"""); + } + + public void testDottedAndNestedNotationInSameMapping() throws IOException { + CompressedXContent mapping = new CompressedXContent(""" + { + "properties": { + "parent.child": { + "type": "keyword" + }, + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + }"""); + + assertMergeEquals(List.of(mapping), """ + { + "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "child" : { + "type" : "integer" + } + } + } + } + } + }"""); + } + private void assertMergeEquals(List mappingSources, String expected) throws IOException { final MapperService mapperServiceBulk = createMapperService(mapping(b -> {})); // simulates multiple component 
templates being merged in a composable index template diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 61d62c1e41969..25e4ccdf4d3a9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -1515,8 +1515,7 @@ public void testMergeNested() { NestedObjectMapper result = (NestedObjectMapper) firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isIncludeInParent()); assertTrue(result.isIncludeInRoot()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 3c4aca4d36284..94a4c2ea92fbb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -75,10 +75,7 @@ public void testMergeDisabledField() { new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); assertFalse(((ObjectMapper) merged.getMapper("disabled")).isEnabled()); } @@ -93,8 +90,7 @@ public void testMergeEnabled() { ObjectMapper result = rootObjectMapper.merge( mergeWith, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertTrue(result.isEnabled()); } @@ -115,8 +111,7 @@ public void testMergeEnabledForRootMapper() { ObjectMapper result = firstMapper.merge( secondMapper, - MapperService.MergeReason.INDEX_TEMPLATE, - MapperMergeContext.root(false, false, Long.MAX_VALUE) + MapperMergeContext.root(false, false, MapperService.MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE) ); assertFalse(result.isEnabled()); } @@ -131,10 +126,7 @@ public void testMergeDisabledRootMapper() { Collections.singletonMap("test", new TestRuntimeField("test", "long")) ).build(MapperBuilderContext.root(false, false)); - RootObjectMapper merged = (RootObjectMapper) rootObjectMapper.merge( - mergeWith, - MapperMergeContext.root(false, false, Long.MAX_VALUE) - ); + RootObjectMapper merged = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, Long.MAX_VALUE)); assertFalse(merged.isEnabled()); assertEquals(1, merged.runtimeFields().size()); assertEquals("test", merged.runtimeFields().iterator().next().name()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 74b293ca7d6d6..154132c772927 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -126,6 +126,7 @@ public void testMerge() throws IOException { assertNull(mapper.mapping().getRoot().dynamic()); Mapping 
mergeWith = mapperService.parseMapping( "_doc", + MergeReason.MAPPING_UPDATE, new CompressedXContent(BytesReference.bytes(topMapping(b -> b.field("dynamic", "strict")))) ); Mapping merged = mapper.mapping().merge(mergeWith, reason, Long.MAX_VALUE); @@ -463,10 +464,14 @@ public void testSubobjectsCannotBeUpdated() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "object"))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { - b.field("type", "object"); - b.field("subobjects", "false"); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(fieldMapping(b -> { + b.field("type", "object"); + b.field("subobjects", "false"); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) @@ -478,9 +483,13 @@ public void testSubobjectsCannotBeUpdatedOnRoot() throws IOException { MapperService mapperService = createMapperService(topMapping(b -> b.field("subobjects", false))); DocumentMapper mapper = mapperService.documentMapper(); assertNull(mapper.mapping().getRoot().dynamic()); - Mapping mergeWith = mapperService.parseMapping("_doc", new CompressedXContent(BytesReference.bytes(topMapping(b -> { - b.field("subobjects", true); - })))); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.MAPPING_UPDATE, + new CompressedXContent(BytesReference.bytes(topMapping(b -> { + b.field("subobjects", true); + }))) + ); MapperException exception = expectThrows( MapperException.class, () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index a52fd7e608d24..9b686417badfc 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -254,7 +255,7 @@ public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { IndexService indexService = createIndex("foo"); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> indexService.mapperService().parseMapping("type", new CompressedXContent(mapping)) + () -> indexService.mapperService().parseMapping("type", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping)) ); assertThat(e.getMessage(), equalTo("Failed to parse mapping: Unknown Similarity type [unknown_similarity] for field [field1]")); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 77391aadaa554..1b00ba3e9fd09 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -120,7 +121,7 @@ public final void testUnsupportedParametersAreRejected() throws IOException { + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) ); assertEquals( "Failed to parse mapping: unknown parameter [anything] on metadata field [" + fieldName() + "]", @@ -136,7 +137,7 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { String mappingAsString = "{\n" + " \"_doc\" : {\n" + " \"" + fieldName() + "\" : {\n" + " }\n" + " }\n" + "}"; MapperParsingException exception = expectThrows( MapperParsingException.class, - () -> mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString)) + () -> mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)) ); assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } @@ -161,7 +162,7 @@ public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); } } @@ -184,7 +185,7 @@ public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { + " }\n" + " }\n" + "}"; - assertNotNull(mapperService.parseMapping("_doc", new CompressedXContent(mappingAsString))); + assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); } } From 073048abf2e67161457c33a7403149db025bc14d Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Mon, 8 Apr 2024 09:28:34 -0700 Subject: [PATCH 088/173] Track ongoing search tasks (#107129) --- docs/changelog/107129.yaml | 5 +++++ .../elasticsearch/threadpool/ThreadPool.java | 18 ++++++++++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107129.yaml diff --git a/docs/changelog/107129.yaml b/docs/changelog/107129.yaml new file mode 100644 index 0000000000000..6c9b9094962c1 --- /dev/null +++ b/docs/changelog/107129.yaml @@ -0,0 +1,5 @@ +pr: 107129 +summary: Track ongoing search tasks +area: Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index cf554fe81d4a3..507eff05780b8 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -151,6 +151,8 @@ public static ThreadPoolType fromType(String type) { entry(Names.SYSTEM_CRITICAL_WRITE, ThreadPoolType.FIXED) 
); + public static final double searchAutoscalingEWMA = 0.1; + private final Map executors; private final ThreadPoolInfo threadPoolInfo; @@ -222,7 +224,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16, TaskTrackingConfig.DO_NOT_TRACK)); builders.put( Names.SEARCH, - new FixedExecutorBuilder(settings, Names.SEARCH, searchOrGetThreadPoolSize, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH, + searchOrGetThreadPoolSize, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.SEARCH_WORKER, @@ -230,7 +238,13 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex ); builders.put( Names.SEARCH_COORDINATION, - new FixedExecutorBuilder(settings, Names.SEARCH_COORDINATION, halfProc, 1000, TaskTrackingConfig.DEFAULT) + new FixedExecutorBuilder( + settings, + Names.SEARCH_COORDINATION, + halfProc, + 1000, + new TaskTrackingConfig(true, searchAutoscalingEWMA) + ) ); builders.put( Names.AUTO_COMPLETE, From 91bdfb84a0b04be531b113bd58119ea4804f6e18 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 8 Apr 2024 13:41:53 -0600 Subject: [PATCH 089/173] Clarify data stream recommendations and best practices (#107233) * Clarify data stream recommendations and best practices Our documentation around data streams versus aliases could be interpreted in a way where someone doing *any* updates thinks they need to use an alias with indices instead of a data stream. This commit enhances the documentation around these areas to determine the correct abstraction in a more concrete way. It also tries to clarify that data streams still allow updates to the backing indices, and that a difference is last-write-wins versus first-write-wins. --- .../data-streams/data-streams.asciidoc | 40 ++++++++++++++----- docs/reference/ilm/ilm-tutorial.asciidoc | 17 ++++---- .../ilm/set-up-lifecycle-policy.asciidoc | 7 +++- 3 files changed, 45 insertions(+), 19 deletions(-) diff --git a/docs/reference/data-streams/data-streams.asciidoc b/docs/reference/data-streams/data-streams.asciidoc index 307930d64c4fb..9c7137563caef 100644 --- a/docs/reference/data-streams/data-streams.asciidoc +++ b/docs/reference/data-streams/data-streams.asciidoc @@ -18,6 +18,28 @@ automate the management of these backing indices. For example, you can use hardware and delete unneeded indices. {ilm-init} can help you reduce costs and overhead as your data grows. + +[discrete] +[[should-you-use-a-data-stream]] +== Should you use a data stream? + +To determine whether you should use a data stream for your data, you should consider the format of +the data, and your expected interaction. A good candidate for using a data stream will match the +following criteria: + +* Your data contains a timestamp field, or one could be automatically generated. +* You mostly perform indexing requests, with occasional updates and deletes. +* You index documents without an `_id`, or when indexing documents with an explicit `_id` you expect first-write-wins behavior. + +For most time series data use-cases, a data stream will be a good fit. However, if you find that +your data doesn't fit into these categories (for example, if you frequently send multiple documents +using the same `_id` expecting last-write-wins), you may want to use an index alias with a write +index instead. See documentation for <> for more information. 
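To make the first-write-wins point concrete, here is an illustrative pair of requests (the data stream name and document ID are made up for this sketch, and it assumes a matching index template already exists — none of this is part of the patch itself). Writes into a data stream must use the `create` op type, so a second write that reuses an `_id` is rejected with a `409` version conflict instead of overwriting the first document:

[source,console]
----
PUT my-data-stream/_create/my-id-1
{
  "@timestamp": "2024-04-03T17:04:24.000Z",
  "message": "first write wins"
}

PUT my-data-stream/_create/my-id-1
{
  "@timestamp": "2024-04-03T17:05:00.000Z",
  "message": "this second write is rejected with a version conflict"
}
----

With an index alias and a write index, the same second request (sent as a plain index operation rather than `op_type=create`) would instead overwrite the first document, which is the last-write-wins behavior described above.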
+ +Keep in mind that some features such as <> and +<> require a data stream. + [discrete] [[backing-indices]] == Backing indices @@ -116,19 +138,19 @@ You should not derive any intelligence from the backing indices names. [discrete] [[data-streams-append-only]] -== Append-only +== Append-only (mostly) -Data streams are designed for use cases where existing data is rarely, -if ever, updated. You cannot send update or deletion requests for existing -documents directly to a data stream. Instead, use the +Data streams are designed for use cases where existing data is rarely updated. You cannot send +update or deletion requests for existing documents directly to a data stream. However, you can still +<> in a data stream by submitting +requests directly to the document's backing index. + +If you need to update a larger number of documents in a data stream, you can use the <> and <> APIs. -If needed, you can <> by submitting requests directly to the document's backing index. - -TIP: If you frequently update or delete existing time series data, use an index -alias with a write index instead of a data stream. See +TIP: If you frequently send multiple documents using the same `_id` expecting last-write-wins, you +may want to use an index alias with a write index instead. See <>. include::set-up-a-data-stream.asciidoc[] diff --git a/docs/reference/ilm/ilm-tutorial.asciidoc b/docs/reference/ilm/ilm-tutorial.asciidoc index c7f2c3537b5e8..4513c523056a9 100644 --- a/docs/reference/ilm/ilm-tutorial.asciidoc +++ b/docs/reference/ilm/ilm-tutorial.asciidoc @@ -282,14 +282,15 @@ DELETE /_index_template/timeseries_template [[manage-time-series-data-without-data-streams]] === Manage time series data without data streams -Even though <> are a convenient way to scale -and manage time series data, they are designed to be append-only. We recognise there -might be use-cases where data needs to be updated or deleted in place and the -data streams don't support delete and update requests directly, -so the index APIs would need to be used directly on the data stream's backing indices. - -In these cases, you can use an index alias to manage indices containing the time series data -and periodically roll over to a new index. +Even though <> are a convenient way to scale and manage time series +data, they are designed to be append-only. We recognise there might be use-cases where data needs to +be updated or deleted in place and the data streams don't support delete and update requests +directly, so the index APIs would need to be used directly on the data stream's backing indices. In +these cases we still recommend using a data stream. + +If you frequently send multiple documents using the same `_id` expecting last-write-wins, you can +use an index alias instead of a data stream to manage indices containing the time series data and +periodically roll over to a new index. To automate rollover and management of time series indices with {ilm-init} using an index alias, you: diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 79be6205a8c88..b6310050a4f25 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -81,6 +81,8 @@ To use a policy that triggers the rollover action, you need to configure the policy in the index template used to create each new index. You specify the name of the policy and the alias used to reference the rolling indices. 
+TIP: An `index.lifecycle.rollover_alias` setting is only required if using {ilm} with an alias. It is unnecessary when using <>. + You can use the {kib} Create template wizard to create a template. To access the wizard, open the menu and go to *Stack Management > Index Management*. In the *Index Templates* tab, click *Create template*. @@ -128,8 +130,9 @@ DELETE _index_template/my_template [[create-initial-index]] ==== Create an initial managed index -When you set up policies for your own rolling indices, you need to manually create the first index -managed by a policy and designate it as the write index. +When you set up policies for your own rolling indices, if you are not using the recommended +<>, you need to manually create the first index managed by a policy and +designate it as the write index. IMPORTANT: When you enable {ilm} for {beats} or the {ls} {es} output plugin, the necessary policies and configuration changes are applied automatically. From f1bb5bb5aa0870b939d814650d81967d256c4baf Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 8 Apr 2024 16:56:57 -0400 Subject: [PATCH 090/173] Bump versions after 8.13.2 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index abde05ec7919e..612838592712b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 53243c2c081eb..58dcf875ce297 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.2 + - label: "{{matrix.image}} / 8.13.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.3 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index c5b9bb830a8d6..3462e0fb95aba 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -312,8 +312,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.13.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.2#bwcTest + - label: 8.13.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -321,7 +321,7 @@ steps: machineType: 
n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 2d8ace4845f4f..d3e57196e1c89 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,5 +30,5 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 89449ff7f9f2f..db131b89ffa4e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - "7.17.20" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 992308bd32018..99e811c021845 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -170,6 +170,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version V_8_13_1 = new Version(8_13_01_99); public static final Version V_8_13_2 = new Version(8_13_02_99); + public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 679270e90e894..4bae460e3bce2 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8560001 8.13.0,8595000 8.13.1,8595000 +8.13.2,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index b60066601bf68..61cc2167a9048 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -115,3 +115,4 @@ 8.12.2,8500010 8.13.0,8503000 8.13.1,8503000 +8.13.2,8503000 From e28ecbb5fb59e1fca0e06436bdc6591df46db5e1 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 8 Apr 2024 16:57:35 -0400 Subject: [PATCH 091/173] Prune changelogs after 8.13.2 release --- docs/changelog/106247.yaml | 5 ----- docs/changelog/106673.yaml | 6 ------ docs/changelog/106873.yaml | 6 ------ docs/changelog/106990.yaml | 5 ----- docs/changelog/107054.yaml | 6 ------ docs/changelog/107059.yaml | 5 ----- 6 files changed, 33 deletions(-) delete mode 100644 docs/changelog/106247.yaml delete mode 100644 docs/changelog/106673.yaml delete mode 100644 docs/changelog/106873.yaml delete mode 100644 docs/changelog/106990.yaml delete mode 100644 docs/changelog/107054.yaml delete mode 100644 docs/changelog/107059.yaml diff --git a/docs/changelog/106247.yaml b/docs/changelog/106247.yaml deleted file mode 100644 index 
5895dffd685a4..0000000000000 --- a/docs/changelog/106247.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106247 -summary: Fix a downsample persistent task assignment bug -area: Downsampling -type: bug -issues: [] diff --git a/docs/changelog/106673.yaml b/docs/changelog/106673.yaml deleted file mode 100644 index 9a716d20ad2bc..0000000000000 --- a/docs/changelog/106673.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106673 -summary: "ESQL: Fix fully pruned aggregates" -area: ES|QL -type: bug -issues: - - 106427 diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml deleted file mode 100644 index f823caff7aefe..0000000000000 --- a/docs/changelog/106873.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106873 -summary: Query API Key Information API support for the `typed_keys` request parameter -area: Security -type: enhancement -issues: - - 106817 diff --git a/docs/changelog/106990.yaml b/docs/changelog/106990.yaml deleted file mode 100644 index 26646e742a5ee..0000000000000 --- a/docs/changelog/106990.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106990 -summary: Address concurrency issue in top hits aggregation -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml deleted file mode 100644 index 6511cb5185492..0000000000000 --- a/docs/changelog/107054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107054 -summary: Query API Keys support for both `aggs` and `aggregations` keywords -area: Security -type: enhancement -issues: - - 106839 diff --git a/docs/changelog/107059.yaml b/docs/changelog/107059.yaml deleted file mode 100644 index 6c7ee48f9b53b..0000000000000 --- a/docs/changelog/107059.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107059 -summary: "[Connector API] Support numeric for configuration select option value type" -area: Application -type: bug -issues: [] From 1c89a8ee280f6df07dc3de90e610bbc248da214d Mon Sep 17 00:00:00 2001 From: sliu1013 Date: Tue, 9 Apr 2024 08:05:35 +0800 Subject: [PATCH 092/173] Fix typo in IndicesAdminClient#prepareOpen Javadoc (#107198) --- .../org/elasticsearch/client/internal/IndicesAdminClient.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index d931302740f19..69b897df4d76d 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -247,7 +247,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Opens one or more indices based on their index name. * - * @param indices The name of the indices to close + * @param indices The name of the indices to open */ OpenIndexRequestBuilder prepareOpen(String... 
indices); From 911aaf8ef92d8ecba7d0389571ef18fb82a99eb7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 9 Apr 2024 02:08:36 -0400 Subject: [PATCH 093/173] Forward port release notes for v8.13.2 (#107243) --- docs/reference/release-notes.asciidoc | 2 ++ docs/reference/release-notes/8.13.2.asciidoc | 31 ++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 docs/reference/release-notes/8.13.2.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index f9da92aef925e..05c97d51a38e7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -64,6 +65,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.2.asciidoc[] include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] diff --git a/docs/reference/release-notes/8.13.2.asciidoc b/docs/reference/release-notes/8.13.2.asciidoc new file mode 100644 index 0000000000000..1da23b5125833 --- /dev/null +++ b/docs/reference/release-notes/8.13.2.asciidoc @@ -0,0 +1,31 @@ +[[release-notes-8.13.2]] +== {es} version 8.13.2 + +Also see <>. + +[[bug-8.13.2]] +[float] +=== Bug fixes + +Aggregations:: +* Address concurrency issue in top hits aggregation {es-pull}106990[#106990] + +Application:: +* [Connector API] Support numeric for configuration select option value type {es-pull}107059[#107059] + +Downsampling:: +* Fix a downsample persistent task assignment bug {es-pull}106247[#106247] +* Fix downsample action request serialization {es-pull}106920[#106920] + +ES|QL:: +* ESQL: Fix fully pruned aggregates {es-pull}106673[#106673] (issue: {es-issue}106427[#106427]) + +[[enhancement-8.13.2]] +[float] +=== Enhancements + +Security:: +* Query API Key Information API support for the `typed_keys` request parameter {es-pull}106873[#106873] (issue: {es-issue}106817[#106817]) +* Query API Keys support for both `aggs` and `aggregations` keywords {es-pull}107054[#107054] (issue: {es-issue}106839[#106839]) + + From 49ffa045a694192884d274194fa14100b963e0c2 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 9 Apr 2024 09:18:58 +0200 Subject: [PATCH 094/173] Cut over stored fields to ZSTD for compression. (#103374) This cuts over stored fields with `index.codec: best_speed` (default) to ZSTD with level 0 and blocks of at most 128 documents or 14kB, and `index.codec: best_compression` to ZSTD with level 3 and blocks of at most 2,048 documents or 240kB. Compared with the current codecs, this would yield similar indexing speed, much better space efficiency and similar retrieval speed. Benchmarks on the `elastic/logs` track suggest 10% better storage efficiency and slightly faster ingestion. The Lucene codec infrastructure records the codec on a per-segment basis and ensures that this change is backward-compatible. Segments will get progressively migrated to ZSTD as they get merged in the background. Bindings for ZSTD are provided by the Panama FFI API on JDK21+ and JNA on older JDKs. ZSTD support is currently behind a feature flag, so it won't be enabled immediately when this feature gets merged, this will need a follow-up change. 
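For reviewers who want to try this out once the feature flag is enabled: stored-field compression is still selected through the existing per-index `index.codec` setting, so no new API is involved. The request below is only an illustration (index name made up), not part of this change:

[source,console]
----
PUT zstd-test-index
{
  "settings": {
    "index.codec": "best_compression"
  }
}
----

With the flag enabled, `default` maps to ZSTD level 0 with small blocks, `best_compression` maps to ZSTD level 3 with larger blocks, and the new `legacy_default`/`legacy_best_compression` values keep the previous LZ4/DEFLATE codecs as an escape hatch.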
Co-authored-by: Mark Vieira Co-authored-by: Ryan Ernst --- docs/changelog/103374.yaml | 16 ++ server/src/main/java/module-info.java | 3 + .../index/codec/CodecService.java | 30 ++- .../index/codec/Elasticsearch814Codec.java | 130 +++++++++++ .../codec/LegacyPerFieldMapperCodec.java | 52 +++++ .../index/codec/PerFieldFormatSupplier.java | 123 ++++++++++ .../index/codec/PerFieldMapperCodec.java | 94 +------- .../codec/zstd/Zstd814StoredFieldsFormat.java | 212 ++++++++++++++++++ .../services/org.apache.lucene.codecs.Codec | 1 + .../elasticsearch/index/codec/CodecTests.java | 60 +++-- .../index/codec/PerFieldMapperCodecTests.java | 33 +-- ...estCompressionStoredFieldsFormatTests.java | 23 ++ ...td814BestSpeedStoredFieldsFormatTests.java | 23 ++ .../index/mapper/MapperServiceTestCase.java | 4 +- 14 files changed, 678 insertions(+), 126 deletions(-) create mode 100644 docs/changelog/103374.yaml create mode 100644 server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java create mode 100644 server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec create mode 100644 server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java diff --git a/docs/changelog/103374.yaml b/docs/changelog/103374.yaml new file mode 100644 index 0000000000000..fcdee9185eb92 --- /dev/null +++ b/docs/changelog/103374.yaml @@ -0,0 +1,16 @@ +pr: 103374 +summary: Cut over stored fields to ZSTD for compression +area: Search +type: enhancement +issues: [] +highlight: + title: Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE + body: |- + Stored fields are now compressed by splitting documents into blocks, which + are then compressed independently with ZStandard. `index.codec: default` + (default) uses blocks of at most 14kB or 128 documents compressed with level + 0, while `index.codec: best_compression` uses blocks of at most 240kB or + 2048 documents compressed at level 3. On most datasets that we tested + against, this yielded storage improvements in the order of 10%, slightly + faster indexing and similar retrieval latencies. + notable: true diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 83b8606da2997..abfea0b18b9d8 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -6,6 +6,7 @@ * Side Public License, v 1. 
*/ +import org.elasticsearch.index.codec.Elasticsearch814Codec; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.plugins.internal.RestExtension; @@ -243,6 +244,7 @@ exports org.elasticsearch.index.codec; exports org.elasticsearch.index.codec.tsdb; exports org.elasticsearch.index.codec.bloomfilter; + exports org.elasticsearch.index.codec.zstd; exports org.elasticsearch.index.engine; exports org.elasticsearch.index.fielddata; exports org.elasticsearch.index.fielddata.fieldcomparator; @@ -433,6 +435,7 @@ with org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; + provides org.apache.lucene.codecs.Codec with Elasticsearch814Codec; exports org.elasticsearch.cluster.routing.allocation.shards to diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index d4771ba74e0fb..3ebcd1cb5b420 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -11,7 +11,9 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; import java.util.HashMap; @@ -25,22 +27,40 @@ */ public class CodecService { + public static final FeatureFlag ZSTD_STORED_FIELDS_FEATURE_FLAG = new FeatureFlag("zstd_stored_fields"); + private final Map codecs; public static final String DEFAULT_CODEC = "default"; + public static final String LEGACY_DEFAULT_CODEC = "legacy_default"; // escape hatch public static final String BEST_COMPRESSION_CODEC = "best_compression"; + public static final String LEGACY_BEST_COMPRESSION_CODEC = "legacy_best_compression"; // escape hatch + /** the raw unfiltered lucene default. 
useful for testing */ public static final String LUCENE_DEFAULT_CODEC = "lucene_default"; public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene99Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); + + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, legacyBestSpeedCodec); } + codecs.put(LEGACY_DEFAULT_CODEC, legacyBestSpeedCodec); + + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { + codecs.put( + BEST_COMPRESSION_CODEC, + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) + ); + } else { + codecs.put(BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + } + codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); + codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { codecs.put(codec, Codec.forName(codec)); diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java new file mode 100644 index 0000000000000..e85e05c87b083 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch814Codec.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 8.14. This extends the Lucene 9.9 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. 
+ */ +public class Elasticsearch814Codec extends FilterCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch814Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch814Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch814Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch814Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch814", new Lucene99Codec()); + this.storedFieldsFormat = new Zstd814StoredFieldsFormat(mode); + this.defaultPostingsFormat = new Lucene99PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *
    The default implementation always returns "Lucene99". + * + *
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field. + * + *
    The default implementation always returns "Lucene99". + * + *
    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *
    The default implementation always returns "Lucene95". + * + *
    WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java new file mode 100644 index 0000000000000..a682d26b094e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.mapper.MapperService; + +/** + * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new + * changes in {@link PerFieldMapperCodec}. + */ +public final class LegacyPerFieldMapperCodec extends Lucene99Codec { + + private final PerFieldFormatSupplier formatSupplier; + + public LegacyPerFieldMapperCodec(Lucene99Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + super(compressionMode); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == getClass().getSuperclass() + : "LegacyPerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; + } + + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return formatSupplier.getPostingsFormatForField(field); + } + + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return formatSupplier.getKnnVectorsFormatForField(field); + } + + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return formatSupplier.getDocValuesFormatForField(field); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java new file mode 100644 index 0000000000000..81fc2c0b4a065 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; +import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Objects; + +/** + * Class that encapsulates the logic of figuring out the most appropriate file format for a given field, across postings, doc values and + * vectors. + */ +public class PerFieldFormatSupplier { + + private final MapperService mapperService; + private final BigArrays bigArrays; + private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); + private final KnnVectorsFormat knnVectorsFormat = new Lucene99HnswVectorsFormat(); + private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; + private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + + private final ES812PostingsFormat es812PostingsFormat; + + public PerFieldFormatSupplier(MapperService mapperService, BigArrays bigArrays) { + this.mapperService = mapperService; + this.bigArrays = Objects.requireNonNull(bigArrays); + this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); + this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); + this.es812PostingsFormat = new ES812PostingsFormat(); + } + + public PostingsFormat getPostingsFormatForField(String field) { + if (useBloomFilter(field)) { + return bloomFilterPostingsFormat; + } + return internalGetPostingsFormatForField(field); + } + + private PostingsFormat internalGetPostingsFormatForField(String field) { + if (mapperService != null) { + final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); + if (format != null) { + return format; + } + } + // return our own posting format using PFOR + return es812PostingsFormat; + } + + boolean useBloomFilter(String field) { + if (mapperService == null) { + return false; + } + IndexSettings indexSettings = mapperService.getIndexSettings(); + if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { + // In case for time series indices, the _id isn't randomly generated, + // but based on dimension fields and timestamp field, so during indexing + // version/seq_no/term needs to be looked up and having a bloom filter + // can speed this up significantly. 
+ return indexSettings.getMode() == IndexMode.TIME_SERIES + && IdFieldMapper.NAME.equals(field) + && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } else { + return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); + } + } + + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + if (mapperService != null) { + Mapper mapper = mapperService.mappingLookup().getMapper(field); + if (mapper instanceof DenseVectorFieldMapper vectorMapper) { + return vectorMapper.getKnnVectorsFormatForField(knnVectorsFormat); + } + } + return knnVectorsFormat; + } + + public DocValuesFormat getDocValuesFormatForField(String field) { + if (useTSDBDocValuesFormat(field)) { + return tsdbDocValuesFormat; + } + return docValuesFormat; + } + + boolean useTSDBDocValuesFormat(final String field) { + if (excludeFields(field)) { + return false; + } + + return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); + } + + private boolean excludeFields(String fieldName) { + // Avoid using tsdb codec for fields like _seq_no, _primary_term. + // But _tsid and _ts_routing_hash should always use the tsdb codec. + return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; + } + + private boolean isTimeSeriesModeIndex() { + return mapperService != null && IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index ae497af887d9c..6f88578260db3 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -12,19 +12,10 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; -import org.elasticsearch.index.codec.postings.ES812PostingsFormat; -import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; /** * {@link PerFieldMapperCodec This Lucene codec} provides the default @@ -34,93 +25,32 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
*/ -public final class PerFieldMapperCodec extends Lucene99Codec { +public final class PerFieldMapperCodec extends Elasticsearch814Codec { - private final MapperService mapperService; - private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); - private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; - private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + private final PerFieldFormatSupplier formatSupplier; - private final ES812PostingsFormat es812PostingsFormat; - - static { - assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMapperCodec.class) - : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; - } - - public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); - this.mapperService = mapperService; - this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); - this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); - this.es812PostingsFormat = new ES812PostingsFormat(); + this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); + // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch + // codec that delegates to this new Lucene codec, and make PerFieldMapperCodec extend this new Elasticsearch codec. + assert Codec.forName(Lucene.LATEST_CODEC).getClass() == delegate.getClass() + : "PerFieldMapperCodec must be on the latest lucene codec: " + Lucene.LATEST_CODEC; } @Override public PostingsFormat getPostingsFormatForField(String field) { - if (useBloomFilter(field)) { - return bloomFilterPostingsFormat; - } - return internalGetPostingsFormatForField(field); - } - - private PostingsFormat internalGetPostingsFormatForField(String field) { - final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); - if (format != null) { - return format; - } - // return our own posting format using PFOR - return es812PostingsFormat; - } - - boolean useBloomFilter(String field) { - IndexSettings indexSettings = mapperService.getIndexSettings(); - if (mapperService.mappingLookup().isDataStreamTimestampFieldEnabled()) { - // In case for time series indices, they _id isn't randomly generated, - // but based on dimension fields and timestamp field, so during indexing - // version/seq_no/term needs to be looked up and having a bloom filter - // can speed this up significantly. 
- return indexSettings.getMode() == IndexMode.TIME_SERIES - && IdFieldMapper.NAME.equals(field) - && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } else { - return IdFieldMapper.NAME.equals(field) && IndexSettings.BLOOM_FILTER_ID_FIELD_ENABLED_SETTING.get(indexSettings.getSettings()); - } + return formatSupplier.getPostingsFormatForField(field); } @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - Mapper mapper = mapperService.mappingLookup().getMapper(field); - if (mapper instanceof DenseVectorFieldMapper vectorMapper) { - return vectorMapper.getKnnVectorsFormatForField(super.getKnnVectorsFormatForField(field)); - } - return super.getKnnVectorsFormatForField(field); + return formatSupplier.getKnnVectorsFormatForField(field); } @Override public DocValuesFormat getDocValuesFormatForField(String field) { - if (useTSDBDocValuesFormat(field)) { - return tsdbDocValuesFormat; - } - return docValuesFormat; - } - - boolean useTSDBDocValuesFormat(final String field) { - if (excludeFields(field)) { - return false; - } - - return mapperService != null && isTimeSeriesModeIndex() && mapperService.getIndexSettings().isES87TSDBCodecEnabled(); - } - - private boolean excludeFields(String fieldName) { - // Avoid using tsdb codec for fields like _seq_no, _primary_term. - // But _tsid and _ts_routing_hash should always use the tsdb codec. - return fieldName.startsWith("_") && fieldName.equals("_tsid") == false && fieldName.equals("_ts_routing_hash") == false; - } - - private boolean isTimeSeriesModeIndex() { - return IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); + return formatSupplier.getDocValuesFormatForField(field); } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java new file mode 100644 index 0000000000000..b827bb6436f07 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/zstd/Zstd814StoredFieldsFormat.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.codecs.compressing.CompressionMode; +import org.apache.lucene.codecs.compressing.Compressor; +import org.apache.lucene.codecs.compressing.Decompressor; +import org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingStoredFieldsFormat; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.ByteBuffersDataInput; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; +import org.elasticsearch.nativeaccess.NativeAccess; +import org.elasticsearch.nativeaccess.Zstd; + +import java.io.IOException; + +/** + * {@link org.apache.lucene.codecs.StoredFieldsFormat} that compresses blocks of data using ZStandard. 
+ * + * Unlike Lucene's default stored fields format, this format does not make use of dictionaries (even though ZStandard has great support for + * dictionaries!). This is mostly due to the fact that LZ4/DEFLATE have short sliding windows that they can use to find duplicate strings + * (64kB and 32kB respectively). In contrast, ZSTD doesn't have such a limitation and can better take advantage of large compression + * buffers. + */ +public final class Zstd814StoredFieldsFormat extends Lucene90CompressingStoredFieldsFormat { + + // ZSTD has special optimizations for inputs that are less than 16kB and less than 256kB. So subtract a bit of memory from 16kB and + // 256kB to make our inputs unlikely to grow beyond 16kB for BEST_SPEED and 256kB for BEST_COMPRESSION. + private static final int BEST_SPEED_BLOCK_SIZE = (16 - 2) * 1_024; + private static final int BEST_COMPRESSION_BLOCK_SIZE = (256 - 16) * 1_024; + + /** Attribute key for compression mode. */ + public static final String MODE_KEY = Zstd814StoredFieldsFormat.class.getSimpleName() + ".mode"; + + public enum Mode { + BEST_SPEED(0, BEST_SPEED_BLOCK_SIZE, 128), + BEST_COMPRESSION(3, BEST_COMPRESSION_BLOCK_SIZE, 2048); + + final int level, blockSizeInBytes, blockDocCount; + + Mode(int level, int blockSizeInBytes, int blockDocCount) { + this.level = level; + this.blockSizeInBytes = blockSizeInBytes; + this.blockDocCount = blockDocCount; + } + } + + private final Mode mode; + + public Zstd814StoredFieldsFormat(Mode mode) { + super("ZstdStoredFields814", new ZstdCompressionMode(mode.level), mode.blockSizeInBytes, mode.blockDocCount, 10); + this.mode = mode; + } + + @Override + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + // Both modes are compatible, we only put an attribute for debug purposes. + String previous = si.putAttribute(MODE_KEY, mode.name()); + if (previous != null && previous.equals(mode.name()) == false) { + throw new IllegalStateException( + "found existing value for " + MODE_KEY + " for segment: " + si.name + "old=" + previous + ", new=" + mode.name() + ); + } + return super.fieldsWriter(directory, si, context); + } + + private static class ZstdCompressionMode extends CompressionMode { + private final int level; + + ZstdCompressionMode(int level) { + this.level = level; + } + + @Override + public Compressor newCompressor() { + return new ZstdCompressor(level); + } + + @Override + public Decompressor newDecompressor() { + return new ZstdDecompressor(); + } + + @Override + public String toString() { + return "ZSTD(level=" + level + ")"; + } + } + + private static final class ZstdDecompressor extends Decompressor { + + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. 
+ final byte[] copyBuffer = new byte[4096]; + + ZstdDecompressor() {} + + @Override + public void decompress(DataInput in, int originalLength, int offset, int length, BytesRef bytes) throws IOException { + if (originalLength == 0) { + bytes.offset = 0; + bytes.length = 0; + return; + } + + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int compressedLength = in.readVInt(); + + try ( + CloseableByteBuffer src = nativeAccess.newBuffer(compressedLength); + CloseableByteBuffer dest = nativeAccess.newBuffer(originalLength) + ) { + + while (src.buffer().position() < compressedLength) { + final int numBytes = Math.min(copyBuffer.length, compressedLength - src.buffer().position()); + in.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int decompressedLen = zstd.decompress(dest, src); + if (decompressedLen != originalLength) { + throw new CorruptIndexException("Expected " + originalLength + " decompressed bytes, got " + decompressedLen, in); + } + + bytes.bytes = ArrayUtil.growNoCopy(bytes.bytes, length); + dest.buffer().get(offset, bytes.bytes, 0, length); + bytes.offset = 0; + bytes.length = length; + } + } + + @Override + public Decompressor clone() { + return new ZstdDecompressor(); + } + } + + private static class ZstdCompressor extends Compressor { + + final int level; + // Buffer for copying between the DataInput and native memory. No hard science behind this number, it just tries to be high enough + // to benefit from bulk copying and low enough to keep heap usage under control. + final byte[] copyBuffer = new byte[4096]; + + ZstdCompressor(int level) { + this.level = level; + } + + @Override + public void compress(ByteBuffersDataInput buffersInput, DataOutput out) throws IOException { + final NativeAccess nativeAccess = NativeAccess.instance(); + final Zstd zstd = nativeAccess.getZstd(); + + final int srcLen = Math.toIntExact(buffersInput.length()); + if (srcLen == 0) { + return; + } + + final int compressBound = zstd.compressBound(srcLen); + + // NOTE: We are allocating/deallocating native buffers on each call. We could save allocations by reusing these buffers, though + // this would come at the expense of higher permanent memory usage. Benchmarks suggested that there is some performance to save + // there, but it wouldn't be a game changer either. + // Also note that calls to #compress implicitly allocate memory under the hood for e.g. hash tables and chain tables that help + // identify duplicate strings. So if we wanted to avoid allocating memory on every compress call, we should also look into + // reusing compression contexts, which are not small and would increase permanent memory usage as well. 
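+            // As an illustration only (not something this change implements), such a reuse strategy could keep a thread-local
+            // CloseableByteBuffer pair and a cached compression context, trading the per-call allocations in the block below for
+            // permanently-held native memory.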
+ try ( + CloseableByteBuffer src = nativeAccess.newBuffer(srcLen); + CloseableByteBuffer dest = nativeAccess.newBuffer(compressBound) + ) { + + while (buffersInput.position() < buffersInput.length()) { + final int numBytes = Math.min(copyBuffer.length, (int) (buffersInput.length() - buffersInput.position())); + buffersInput.readBytes(copyBuffer, 0, numBytes); + src.buffer().put(copyBuffer, 0, numBytes); + } + src.buffer().flip(); + + final int compressedLen = zstd.compress(dest, src, level); + out.writeVInt(compressedLen); + + for (int written = 0; written < compressedLen;) { + final int numBytes = Math.min(copyBuffer.length, compressedLen - written); + dest.buffer().get(copyBuffer, 0, numBytes); + out.writeBytes(copyBuffer, 0, numBytes); + written += numBytes; + assert written == dest.buffer().position(); + } + } + } + + @Override + public void close() throws IOException {} + } +} diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec new file mode 100644 index 0000000000000..b99a15507f742 --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -0,0 +1 @@ +org.elasticsearch.index.codec.Elasticsearch814Codec diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 7a3d48aad13d3..bd4aa0241cd27 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -12,10 +12,11 @@ import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntField; +import org.apache.lucene.document.KeywordField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.TransportVersion; @@ -31,6 +32,7 @@ import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; @@ -43,35 +45,51 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); + assertEquals( + "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=0), chunkSize=14336, maxDocsPerChunk=128, blockShift=10)", + codec.storedFieldsFormat().toString() + ); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); + assertEquals( + 
"Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=3), chunkSize=245760, maxDocsPerChunk=2048, blockShift=10)", + codec.storedFieldsFormat().toString() + ); + } + + public void testLegacyDefault() throws Exception { + Codec codec = createCodecService().codec("legacy_default"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } - // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - iw.addDocument(new Document()); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); - ir.close(); - dir.close(); + public void testLegacyBestCompression() throws Exception { + Codec codec = createCodecService().codec("legacy_best_compression"); + assertThat(codec, Matchers.instanceOf(Lucene99Codec.class)); + assertThat(codec.storedFieldsFormat(), Matchers.instanceOf(Lucene90StoredFieldsFormat.class)); + // Make sure the legacy codec is writable + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(codec))) { + Document doc = new Document(); + doc.add(new KeywordField("string_field", "abc", Field.Store.YES)); + doc.add(new IntField("int_field", 42, Field.Store.YES)); + w.addDocument(doc); + try (DirectoryReader r = DirectoryReader.open(w)) {} + } } private CodecService createCodecService() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index 4ce20e35869cb..74657842488b5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -63,7 +62,7 @@ public class PerFieldMapperCodecTests extends ESTestCase { """; public void testUseBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, randomBoolean(), false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); 
assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -71,7 +70,7 @@ public void testUseBloomFilter() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); @@ -79,13 +78,13 @@ public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { } public void testUseBloomFilterWithTimestampFieldEnabled_noTimeSeriesMode() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, false); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); assertWarnings( @@ -94,28 +93,29 @@ public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() thr } public void testUseES87TSDBEncodingForTimestampField() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDoNotUseES87TSDBEncodingForTimestampFieldNonTimeSeriesIndex() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, true); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, true); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } public void testEnableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); } public void testDisableES87TSDBCodec() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_1); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_1); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); } - private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean timestampField, boolean timeSeries, boolean disableBloomFilter) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { 
settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -140,31 +140,32 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(false, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(false, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeDisabledCodecDisabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, false, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(false)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(false)); } public void testUseTimeSeriesModeAndCodecEnabled() throws IOException { - PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, MAPPING_2); + PerFieldFormatSupplier perFieldMapperCodec = createFormatSupplier(true, true, MAPPING_2); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("@timestamp")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("counter")), is(true)); assertThat((perFieldMapperCodec.useTSDBDocValuesFormat("gauge")), is(true)); } - private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) throws IOException { + private PerFieldFormatSupplier createFormatSupplier(boolean enableES87TSDBCodec, boolean timeSeries, String mapping) + throws IOException { Settings.Builder settings = Settings.builder(); if (timeSeries) { settings.put(IndexSettings.MODE.getKey(), "time_series"); @@ -173,7 +174,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldFormatSupplier(mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..1679813ed1340 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java new file mode 100644 index 0000000000000..5acdd4f5730e9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.zstd; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; +import org.elasticsearch.index.codec.Elasticsearch814Codec; + +public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { + + private final Codec codec = new Elasticsearch814Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 09c6eed08bf28..620db8dc83510 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -43,6 +42,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.codec.PerFieldMapperCodec; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -243,7 +243,7 @@ protected static void withLuceneIndex( CheckedConsumer test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, 
mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw); From 658f7aa21c4a4165e9814aada63bc74b2366e403 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 9 Apr 2024 08:38:13 +0100 Subject: [PATCH 095/173] Unwrap exceptions in ES|QL Async Query GET (#107227) This commit updates the implementation of the ES|QL Async Query Get transport action so that exceptions are unwrapped before being exposed. This ensures that the exceptions seen by the client remain the same between sync and async. Specifically, only ParsingException and VerificationException are unwrapped, since these are currently the only ones that are returned. --- .../NotSerializableExceptionWrapper.java | 2 +- .../xpack/esql/action/EsqlActionIT.java | 16 +---- .../xpack/esql/action/EsqlAsyncActionIT.java | 24 -------- .../TransportEsqlAsyncGetResultsAction.java | 58 +++++++++++++++++++ 4 files changed, 62 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java index bd97ec0c2f63f..056fc59b4fdd5 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java @@ -58,7 +58,7 @@ protected void writeTo(StreamOutput out, Writer nestedExceptionsWrite } @Override - protected String getExceptionName() { + public String getExceptionName() { return name; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 17082e9855761..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -994,29 +994,19 @@ public void testOverlappingIndexPatterns() throws Exception { .add(new IndexRequest("test_overlapping_index_patterns_2").id("1").source("field", "foo")) .get(); - assertVerificationException("from test_overlapping_index_patterns_* | sort field"); + assertThrows(VerificationException.class, () -> run("from test_overlapping_index_patterns_* | sort field")); } public void testErrorMessageForUnknownColumn() { - var e = assertVerificationException("row a = 1 | eval x = b"); + var e = expectThrows(VerificationException.class, () -> run("row a = 1 | eval x = b")); assertThat(e.getMessage(), containsString("Unknown column [b]")); } - // Straightforward verification. Subclasses can override. - protected Exception assertVerificationException(String esqlCommand) { - return expectThrows(VerificationException.class, () -> run(esqlCommand)); - } - public void testErrorMessageForEmptyParams() { - var e = assertParsingException("row a = 1 | eval x = ?"); + var e = expectThrows(ParsingException.class, () -> run("row a = 1 | eval x = ?")); assertThat(e.getMessage(), containsString("Not enough actual parameters 0")); } - // Straightforward verification. Subclasses can override. 
- protected Exception assertParsingException(String esqlCommand) { - return expectThrows(ParsingException.class, () -> run(esqlCommand)); - } - public void testEmptyIndex() { assertAcked(client().admin().indices().prepareCreate("test_empty").setMapping("k", "type=keyword", "v", "type=long").get()); try (EsqlQueryResponse results = run("from test_empty")) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index e884b67fb5d24..e2e635917ed1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -25,8 +24,6 @@ import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; -import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.nio.file.Path; @@ -37,7 +34,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsEqual.equalTo; @@ -122,26 +118,6 @@ AcknowledgedResponse deleteAsyncId(String id) { } } - // Overridden to allow for not-serializable wrapper. - @Override - protected Exception assertVerificationException(String esqlCommand) { - var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, VerificationException.class), () -> run(esqlCommand)); - if (e instanceof NotSerializableExceptionWrapper wrapper) { - assertThat(wrapper.unwrapCause().getMessage(), containsString("verification_exception")); - } - return e; - } - - // Overridden to allow for not-serializable wrapper. 
-    @Override
-    protected Exception assertParsingException(String esqlCommand) {
-        var e = expectThrowsAnyOf(List.of(NotSerializableExceptionWrapper.class, ParsingException.class), () -> run(esqlCommand));
-        if (e instanceof NotSerializableExceptionWrapper wrapper) {
-            assertThat(wrapper.unwrapCause().getMessage(), containsString("parsing_exception"));
-        }
-        return e;
-    }
-
     public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin {
         public LocalStateEsqlAsync(final Settings settings, final Path configPath) {
             super(settings, configPath);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
index 8785b8f5de887..afb7ee6f53029 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java
@@ -7,20 +7,29 @@

 package org.elasticsearch.xpack.esql.plugin;

+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchWrapperException;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction;
 import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
 import org.elasticsearch.xpack.esql.action.EsqlQueryTask;
+import org.elasticsearch.xpack.esql.parser.ParsingException;
 import org.elasticsearch.xpack.ql.plugin.AbstractTransportQlAsyncGetResultsAction;
+import org.elasticsearch.xpack.ql.tree.Source;

 public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsyncGetResultsAction<EsqlQueryResponse, EsqlQueryTask> {

@@ -51,8 +60,57 @@ public TransportEsqlAsyncGetResultsAction(
         this.blockFactory = blockFactory;
     }

+    @Override
+    protected void doExecute(Task task, GetAsyncResultRequest request, ActionListener<EsqlQueryResponse> listener) {
+        super.doExecute(task, request, unwrapListener(listener));
+    }
+
     @Override
     public Writeable.Reader<EsqlQueryResponse> responseReader() {
         return EsqlQueryResponse.reader(blockFactory);
     }
+
+    static final String PARSE_EX_NAME = ElasticsearchException.getExceptionName(new ParsingException(Source.EMPTY, ""));
+    static final String VERIFY_EX_NAME = ElasticsearchException.getExceptionName(new VerificationException(""));
+
+    /**
+     * Unwraps the exception in the case of failure. This keeps the exception types
+     * the same as the sync API, namely ParsingException and VerificationException.
+     */
+    static <R> ActionListener<R> unwrapListener(ActionListener<R> listener) {
+        return new ActionListener<>() {
+            @Override
+            public void onResponse(R o) {
+                listener.onResponse(o);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                if (e instanceof ElasticsearchWrapperException && e instanceof ElasticsearchException ee) {
+                    e = unwrapEsException(ee);
+                }
+                if (e instanceof NotSerializableExceptionWrapper wrapper) {
+                    String name = wrapper.getExceptionName();
+                    if (PARSE_EX_NAME.equals(name)) {
+                        e = new ParsingException(Source.EMPTY, e.getMessage());
+                        e.setStackTrace(wrapper.getStackTrace());
+                        e.addSuppressed(wrapper);
+                    } else if (VERIFY_EX_NAME.equals(name)) {
+                        e = new VerificationException(e.getMessage());
+                        e.setStackTrace(wrapper.getStackTrace());
+                        e.addSuppressed(wrapper);
+                    }
+                }
+                listener.onFailure(e);
+            }
+        };
+    }
+
+    static RuntimeException unwrapEsException(ElasticsearchException esEx) {
+        Throwable root = esEx.unwrapCause();
+        if (root instanceof RuntimeException runtimeException) {
+            return runtimeException;
+        }
+        return esEx;
+    }
 }

From 36049730fde3bd672c1904497bd44f237ed8ca80 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 9 Apr 2024 09:03:36 +0100
Subject: [PATCH 096/173] Remove executor lookups from `TransportBulkAction` (#106941)

Replaces the `String` names with proper `Executor` instances. Relates
#106279 (removes another usage of `SAME`)

Relates #106940, #106938, #105460, #99787, #97879 etc.
---
 .../action/bulk/BulkOperation.java            | 17 +--
 .../action/bulk/TransportBulkAction.java      | 48 +++----
 .../bulk/TransportSimulateBulkAction.java     |  3 +-
 .../elasticsearch/ingest/IngestService.java   |  7 +-
 .../action/bulk/BulkOperationTests.java       |  4 +-
 ...ActionIndicesThatCannotBeCreatedTests.java |  3 +-
 .../bulk/TransportBulkActionIngestTests.java  | 119 +++++++++++++++---
 .../action/bulk/TransportBulkActionTests.java | 28 ++++-
 .../bulk/TransportBulkActionTookTests.java    |  9 +-
 .../TransportSimulateBulkActionTests.java     |  2 +-
 .../ingest/IngestServiceTests.java            | 49 ++++----
 .../threadpool/TestThreadPool.java            | 77 ------------
 12 files changed, 202 insertions(+), 164 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java
index 1e9b1446850af..412e4f3c875e8 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java
@@ -49,6 +49,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.Executor;
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
@@ -74,16 +75,16 @@ final class BulkOperation extends ActionRunnable<BulkResponse> {
     private final long startTimeNanos;
     private final ClusterStateObserver observer;
     private final Map<String, IndexNotFoundException> indicesThatCannotBeCreated;
-    private final String executorName;
+    private final Executor executor;
     private final LongSupplier relativeTimeProvider;
     private final FailureStoreDocumentConverter failureStoreDocumentConverter;
-    private IndexNameExpressionResolver indexNameExpressionResolver;
-    private NodeClient client;
+    private final IndexNameExpressionResolver indexNameExpressionResolver;
+    private final NodeClient client;

     BulkOperation(
         Task task,
         ThreadPool threadPool,
-        String executorName,
+        Executor executor,
         ClusterService clusterService,
         BulkRequest bulkRequest,
         NodeClient client,
@@ -97,7 +98,7 @@ final class
BulkOperation extends ActionRunnable { this( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -115,7 +116,7 @@ final class BulkOperation extends ActionRunnable { BulkOperation( Task task, ThreadPool threadPool, - String executorName, + Executor executor, ClusterService clusterService, BulkRequest bulkRequest, NodeClient client, @@ -137,7 +138,7 @@ final class BulkOperation extends ActionRunnable { this.listener = listener; this.startTimeNanos = startTimeNanos; this.indicesThatCannotBeCreated = indicesThatCannotBeCreated; - this.executorName = executorName; + this.executor = executor; this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; @@ -543,7 +544,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - threadPool.executor(executorName).submit(operation); + executor.execute(operation); } }); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index bf50fd06d056b..3494701cf5b7a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -70,6 +70,7 @@ import java.util.Optional; import java.util.Set; import java.util.SortedMap; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -101,6 +102,9 @@ public class TransportBulkAction extends HandledTransportAction releasingListener = ActionListener.runBefore(listener, releasable::close); - final String executorName = isOnlySystem ? Names.SYSTEM_WRITE : Names.WRITE; - ensureClusterStateThenForkAndExecute(task, bulkRequest, executorName, releasingListener); + final Executor executor = isOnlySystem ? 
systemWriteExecutor : writeExecutor; + ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); } private void ensureClusterStateThenForkAndExecute( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener releasingListener ) { final ClusterState initialState = clusterService.state(); @@ -274,7 +280,7 @@ private void ensureClusterStateThenForkAndExecute( clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } @Override @@ -288,20 +294,20 @@ public void onTimeout(TimeValue timeout) { } }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)); } else { - forkAndExecute(task, bulkRequest, executorName, releasingListener); + forkAndExecute(task, bulkRequest, executor, releasingListener); } } - private void forkAndExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener releasingListener) { - threadPool.executor(executorName).execute(new ActionRunnable<>(releasingListener) { + private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener releasingListener) { + executor.execute(new ActionRunnable<>(releasingListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, releasingListener); + doInternalExecute(task, bulkRequest, executor, releasingListener); } }); } - protected void doInternalExecute(Task task, BulkRequest bulkRequest, String executorName, ActionListener listener) { + protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener listener) { final long startTime = relativeTime(); boolean hasIndexRequestsWithPipelines = false; @@ -334,7 +340,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, executorName, metadata, l); + processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } @@ -385,7 +391,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec createMissingIndicesAndIndexData( task, bulkRequest, - executorName, + executor, listener, indicesToAutoCreate, dataStreamsToBeRolledOver, @@ -401,7 +407,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, @@ -411,13 +417,13 @@ protected void createMissingIndicesAndIndexData( final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); return; } - Runnable executeBulkRunnable = () -> threadPool.executor(executorName).execute(new ActionRunnable<>(listener) { + Runnable executeBulkRunnable = () -> 
executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { - executeBulk(task, bulkRequest, startTime, listener, executorName, responses, indicesThatCannotBeCreated); + executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { @@ -636,14 +642,14 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { new BulkOperation( task, threadPool, - executorName, + executor, clusterService, bulkRequest, client, @@ -663,7 +669,7 @@ private long relativeTime() { private void processBulkIndexIngestRequest( Task task, BulkRequest original, - String executorName, + Executor executor, Metadata metadata, ActionListener listener ) { @@ -696,7 +702,7 @@ private void processBulkIndexIngestRequest( ActionRunnable runnable = new ActionRunnable<>(actionListener) { @Override protected void doRun() { - doInternalExecute(task, bulkRequest, executorName, actionListener); + doInternalExecute(task, bulkRequest, executor, actionListener); } @Override @@ -713,12 +719,12 @@ public boolean isForceExecution() { if (originalThread == Thread.currentThread()) { runnable.run(); } else { - threadPool.executor(executorName).execute(runnable); + executor.execute(runnable); } } } }, - executorName + executor ); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index f65d0f462fde6..1b3949f3c00ac 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -30,6 +30,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; public class TransportSimulateBulkAction extends TransportBulkAction { @Inject @@ -70,7 +71,7 @@ public TransportSimulateBulkAction( protected void createMissingIndicesAndIndexData( Task task, BulkRequest bulkRequest, - String executorName, + Executor executor, ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToRollover, diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index f406684c50948..be1906ab8d05e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -83,6 +83,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -696,7 +697,7 @@ private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exce * @param onCompletion A callback executed once all documents have been processed. Accepts the thread * that ingestion completed on or an exception in the event that the entire operation * has failed. - * @param executorName Which executor the bulk request should be executed on. + * @param executor Which executor the bulk request should be executed on. 
*/ public void executeBulkRequest( final int numberOfActionRequests, @@ -706,11 +707,11 @@ public void executeBulkRequest( final TriConsumer onStoreFailure, final BiConsumer onFailure, final BiConsumer onCompletion, - final String executorName + final Executor executor ) { assert numberOfActionRequests > 0 : "numberOfActionRequests must be greater than 0 but was [" + numberOfActionRequests + "]"; - threadPool.executor(executorName).execute(new AbstractRunnable() { + executor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 2226c40b618f4..23395556761f1 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexNotFoundException; @@ -48,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Assume; import org.junit.Before; @@ -843,7 +843,7 @@ private BulkOperation newBulkOperation( return new BulkOperation( null, threadPool, - ThreadPool.Names.SAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, clusterService, request, client, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 3057b00553a22..20d826b11c1e7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -39,6 +39,7 @@ import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.Function; @@ -137,7 +138,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index b97e8303a8eb5..52d50b3a23a0d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -47,9 +47,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import 
org.junit.Before; @@ -57,13 +55,18 @@ import org.mockito.Captor; import org.mockito.MockitoAnnotations; -import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -73,6 +76,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -95,6 +99,9 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); private FeatureService mockFeatureService; + private static final ExecutorService writeExecutor = new NamedDirectExecutorService("write"); + private static final ExecutorService systemWriteExecutor = new NamedDirectExecutorService("system_write"); + /** Services needed by bulk action */ TransportService transportService; ClusterService clusterService; @@ -158,7 +165,7 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { @@ -186,13 +193,95 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction shutdownNow() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isShutdown() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean isTerminated() { + return fail(null, "shutdown not supported"); + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Callable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task, T result) { + return fail(null, "shutdown not supported"); + } + + @Override + public Future submit(Runnable task) { + return fail(null, "shutdown not supported"); + } + + @Override + public List> invokeAll(Collection> tasks) { + return null; + } + + @Override + public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks) { + return fail(null, "shutdown not supported"); + } + + @Override + public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) { + return fail(null, "shutdown not supported"); + } + } + @Before - public void setupAction() throws IOException { + public void setupAction() { // initialize captors, which must be members to use @Capture because of generics threadPool = mock(ThreadPool.class); + when(threadPool.executor(eq(ThreadPool.Names.WRITE))).thenReturn(writeExecutor); + when(threadPool.executor(eq(ThreadPool.Names.SYSTEM_WRITE))).thenReturn(systemWriteExecutor); MockitoAnnotations.openMocks(this); // setup services that will be called by action - transportService = 
MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); + transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); clusterService = mock(ClusterService.class); localIngest = true; // setup nodes for local and remote @@ -312,7 +401,7 @@ public void testIngestLocal() throws Exception { redirectHandler.capture(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -360,7 +449,7 @@ public void testSingleItemBulkActionIngestLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -408,7 +497,7 @@ public void testIngestSystemLocal() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.SYSTEM_WRITE) + same(systemWriteExecutor) ); completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); @@ -567,7 +656,7 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest1.getPipeline(), "default_pipeline"); assertEquals(indexRequest2.getPipeline(), "default_pipeline"); @@ -617,7 +706,7 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); completionHandler.getValue().accept(null, exception); assertFalse(action.indexCreated); // still no index yet, the ingest node failed. 
@@ -713,7 +802,7 @@ public void testFindDefaultPipelineFromTemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -753,7 +842,7 @@ public void testFindDefaultPipelineFromV2TemplateMatch() { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); } @@ -782,7 +871,7 @@ public void testIngestCallbackExceptionHandled() throws Exception { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); indexRequest1.autoGenerateId(); completionHandler.getValue().accept(Thread.currentThread(), null); @@ -821,7 +910,7 @@ private void validateDefaultPipeline(IndexRequest indexRequest) { any(), failureHandler.capture(), completionHandler.capture(), - eq(Names.WRITE) + same(writeExecutor) ); assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(null, exception); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 1a16d9083df55..960397033f602 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -59,12 +59,14 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.bulk.TransportBulkAction.prohibitCustomRoutingOnDataStream; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamServiceTests.createDataStream; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.junit.Assume.assumeThat; import static org.mockito.ArgumentMatchers.any; @@ -321,31 +323,45 @@ public void testOnlySystem() { assertFalse(TransportBulkAction.isOnlySystem(buildBulkRequest(mixed), indicesLookup, systemIndices)); } - public void testRejectCoordination() throws Exception { + private void blockWriteThreadPool(CountDownLatch blockingLatch) { + assertThat(blockingLatch.getCount(), greaterThan(0L)); + final var executor = threadPool.executor(ThreadPool.Names.WRITE); + // Add tasks repeatedly until we get an EsRejectedExecutionException which indicates that the threadpool and its queue are full. 
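+        // The submitted tasks park on blockingLatch, so they keep the pool saturated until the caller counts the latch down
+        // (each test does this in its finally block).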
+ expectThrows(EsRejectedExecutionException.class, () -> { + // noinspection InfiniteLoopStatement + while (true) { + executor.execute(() -> safeAwait(blockingLatch)); + } + }); + } + + public void testRejectCoordination() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); + final var blockingLatch = new CountDownLatch(1); try { - threadPool.startForcingRejections(); + blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } - public void testRejectionAfterCreateIndexIsPropagated() throws Exception { + public void testRejectionAfterCreateIndexIsPropagated() { BulkRequest bulkRequest = new BulkRequest().add(new IndexRequest("index").id("id").source(Collections.emptyMap())); bulkAction.failIndexCreation = randomBoolean(); + final var blockingLatch = new CountDownLatch(1); try { - bulkAction.beforeIndexCreation = threadPool::startForcingRejections; + bulkAction.beforeIndexCreation = () -> blockWriteThreadPool(blockingLatch); PlainActionFuture future = new PlainActionFuture<>(); ActionTestUtils.execute(bulkAction, null, bulkRequest, future); expectThrows(EsRejectedExecutionException.class, future); assertTrue(bulkAction.indexCreated); } finally { - threadPool.stopForcingRejections(); + blockingLatch.countDown(); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index cb9bdd1f3a827..09513351652b8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -50,6 +50,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Map; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; @@ -140,12 +141,12 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { expected.set(1000000); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } else { @@ -165,13 +166,13 @@ void executeBulk( BulkRequest bulkRequest, long startTimeNanos, ActionListener listener, - String executorName, + Executor executor, AtomicArray responses, Map indicesThatCannotBeCreated ) { long elapsed = spinForAtLeastOneMillisecond(); expected.set(elapsed); - super.executeBulk(task, bulkRequest, startTimeNanos, listener, executorName, responses, indicesThatCannotBeCreated); + super.executeBulk(task, bulkRequest, startTimeNanos, listener, executor, responses, indicesThatCannotBeCreated); } }; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 2657bdef8c09d..fc9e9f05542c9 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -200,7 +200,7 @@ public void onFailure(Exception e) { bulkAction.createMissingIndicesAndIndexData( task, bulkRequest, - randomAlphaOfLength(10), + r -> fail("executor is unused"), listener, indicesToAutoCreate, dataStreamsToRollover, diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 9582a6e76d539..084eb94852524 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -219,7 +218,7 @@ public void testExecuteIndexPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1127,7 +1126,7 @@ public String getType() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertTrue(failure.get()); @@ -1172,7 +1171,7 @@ public void testExecuteBulkPipelineDoesNotExist() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, times(1)).accept( argThat(item -> item == 2), @@ -1249,7 +1248,7 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(wrappedObserverWasUsed.get(), equalTo(2)); assertThat(parsedValueWasUsed.get(), equalTo(2)); @@ -1284,7 +1283,7 @@ public void testExecuteSuccess() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1326,7 +1325,7 @@ public void testDynamicTemplates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); latch.await(); assertThat(indexRequest.getDynamicTemplates(), equalTo(Map.of("foo", "bar", "foo.bar", "baz"))); @@ -1356,7 +1355,7 @@ public void testExecuteEmptyPipeline() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1419,7 +1418,7 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(any(), 
any()); verify(failureHandler, never()).accept(any(), any()); @@ -1477,7 +1476,7 @@ public void testExecuteFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1535,7 +1534,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1587,7 +1586,7 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); @@ -1650,7 +1649,7 @@ public void testBulkRequestExecutionWithFailures() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(e -> e.getCause().equals(error))); @@ -1704,7 +1703,7 @@ public void testExecuteFailureRedirection() throws Exception { redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1761,7 +1760,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception redirectHandler, failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); @@ -1827,7 +1826,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { requestItemRedirectHandler, requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemRedirectHandler, times(numIndexRequests)).apply(anyInt(), anyString(), argThat(e -> e.getCause().equals(error))); @@ -1888,7 +1887,7 @@ public void testBulkRequestExecution() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(requestItemErrorHandler, never()).accept(any(), any()); @@ -2003,7 +2002,7 @@ public String execute() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); { @@ -2083,7 +2082,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting 
failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.pipelineStats().size(), equalTo(2)); @@ -2109,7 +2108,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.pipelineStats().size(), equalTo(2)); @@ -2140,7 +2139,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.pipelineStats().size(), equalTo(2)); @@ -2172,7 +2171,7 @@ public void testStats() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.pipelineStats().size(), equalTo(2)); @@ -2269,7 +2268,7 @@ public String getDescription() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -2359,7 +2358,7 @@ public void testCBORParsing() throws Exception { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); } @@ -2439,7 +2438,7 @@ public void testSetsRawTimestamp() { (slot, targetIndex, e) -> fail("Should not be redirecting failures"), (integer, e) -> {}, (thread, e) -> {}, - Names.WRITE + EsExecutors.DIRECT_EXECUTOR_SERVICE ); assertThat(indexRequest1.getRawTimestamp(), nullValue()); diff --git a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java index ce8e3a2574f3e..e2fa31c31a46f 100644 --- a/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/elasticsearch/threadpool/TestThreadPool.java @@ -9,23 +9,14 @@ package org.elasticsearch.threadpool; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.node.Node; import org.elasticsearch.telemetry.metric.MeterRegistry; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; public class TestThreadPool extends ThreadPool implements Releasable { - private final CountDownLatch blockingLatch = new CountDownLatch(1); - private volatile boolean returnRejectingExecutor = false; - private volatile ThreadPoolExecutor rejectingExecutor; - public TestThreadPool(String name, ExecutorBuilder... 
customBuilders) { this(name, Settings.EMPTY, customBuilders); } @@ -34,74 +25,6 @@ public TestThreadPool(String name, Settings settings, ExecutorBuilder... cust super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), MeterRegistry.NOOP, customBuilders); } - @Override - public ExecutorService executor(String name) { - if (returnRejectingExecutor) { - return rejectingExecutor; - } else { - return super.executor(name); - } - } - - public void startForcingRejections() { - if (rejectingExecutor == null) { - createRejectingExecutor(); - } - returnRejectingExecutor = true; - } - - public void stopForcingRejections() { - returnRejectingExecutor = false; - } - - @Override - public void shutdown() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdown(); - } - super.shutdown(); - } - - @Override - public void shutdownNow() { - blockingLatch.countDown(); - if (rejectingExecutor != null) { - rejectingExecutor.shutdownNow(); - } - super.shutdownNow(); - } - - private synchronized void createRejectingExecutor() { - if (rejectingExecutor != null) { - return; - } - ThreadFactory factory = EsExecutors.daemonThreadFactory("reject_thread"); - rejectingExecutor = EsExecutors.newFixed( - "rejecting", - 1, - 0, - factory, - getThreadContext(), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - - CountDownLatch startedLatch = new CountDownLatch(1); - rejectingExecutor.execute(() -> { - try { - startedLatch.countDown(); - blockingLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); - try { - startedLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - @Override public void close() { ThreadPool.terminate(this, 10, TimeUnit.SECONDS); From d99323e827c1f0e4a51b4b2bbd8feec69decc50c Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 9 Apr 2024 11:34:58 +0300 Subject: [PATCH 097/173] Add retries in concurrent downsampling action (#107213) --- .../xpack/downsample/DownsampleActionSingleNodeTests.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 2057518307fc0..d23f1e4b89a8c 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -553,7 +553,10 @@ public void onFailure(Exception e) { fail("downsample index has not been created"); } }); - downsample(sourceIndex, downsampleIndex, config); + + // Downsample with retries, in case the downsampled index is not ready. 
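As background for the fix below, `assertBusy` re-runs its block with growing sleeps between attempts until the block stops throwing or the timeout elapses, then rethrows the last failure. A rough JDK-only sketch of that retry shape, using a hypothetical helper rather than the actual `ESTestCase` implementation:

[source,java]
----
import java.util.concurrent.TimeUnit;

public class RetrySketch {
    /** Re-runs {@code attempt} until it stops throwing or the timeout elapses. */
    static void retryUntilSuccess(Runnable attempt, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        long sleepMillis = 1;
        while (true) {
            try {
                attempt.run();
                return; // success: stop retrying
            } catch (AssertionError | RuntimeException e) {
                if (System.nanoTime() >= deadline) {
                    throw e; // out of time: surface the last failure
                }
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(sleepMillis * 2, 1_000); // capped exponential backoff
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        long readyAt = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(50);
        retryUntilSuccess(() -> {
            if (System.nanoTime() < readyAt) {
                throw new IllegalStateException("target not ready yet"); // transient failure, retried
            }
        }, 2, TimeUnit.SECONDS);
        System.out.println("succeeded after retries");
    }
}
----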
+ assertBusy(() -> downsample(sourceIndex, downsampleIndex, config), 120, TimeUnit.SECONDS); + // We must wait until the in-progress downsample ends, otherwise data will not be cleaned up assertBusy(() -> assertTrue("In progress downsample did not complete", downsampleListener.success), 60, TimeUnit.SECONDS); } From 2588c72a5218756aaaacb81ec9adfb625fc76921 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 9 Apr 2024 10:41:34 +0200 Subject: [PATCH 098/173] ES|QL: Add unit tests and docs for DATE_TRUNC() (#107145) --- .../functions/date-time-functions.asciidoc | 2 +- .../functions/description/date_trunc.asciidoc | 5 + .../{ => examples}/date_trunc.asciidoc | 29 +--- .../esql/functions/layout/date_trunc.asciidoc | 15 ++ .../functions/parameters/date_trunc.asciidoc | 9 ++ .../esql/functions/signature/date_trunc.svg | 1 + .../esql/functions/types/date_trunc.asciidoc | 10 ++ .../src/main/resources/meta.csv-spec | 4 +- .../esql/expression/function/Example.java | 6 + .../function/scalar/date/DateTrunc.java | 23 ++- .../xpack/esql/analysis/AnalyzerTests.java | 16 +- .../function/AbstractFunctionTestCase.java | 14 +- .../function/scalar/date/DateTruncTests.java | 152 +++++++++++------- 13 files changed, 187 insertions(+), 99 deletions(-) create mode 100644 docs/reference/esql/functions/description/date_trunc.asciidoc rename docs/reference/esql/functions/{ => examples}/date_trunc.asciidoc (68%) create mode 100644 docs/reference/esql/functions/layout/date_trunc.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_trunc.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_trunc.svg create mode 100644 docs/reference/esql/functions/types/date_trunc.asciidoc diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index e9d6628c63894..c1cd36e376a1c 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -22,5 +22,5 @@ include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] -include::date_trunc.asciidoc[] +include::layout/date_trunc.asciidoc[] include::now.asciidoc[] diff --git a/docs/reference/esql/functions/description/date_trunc.asciidoc b/docs/reference/esql/functions/description/date_trunc.asciidoc new file mode 100644 index 0000000000000..1fb874e3bd9cd --- /dev/null +++ b/docs/reference/esql/functions/description/date_trunc.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds down a date to the closest interval. diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/examples/date_trunc.asciidoc similarity index 68% rename from docs/reference/esql/functions/date_trunc.asciidoc rename to docs/reference/esql/functions/examples/date_trunc.asciidoc index 4aa228dc14e65..d7cece9aff58b 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/examples/date_trunc.asciidoc @@ -1,26 +1,4 @@ -[discrete] -[[esql-date_trunc]] -=== `DATE_TRUNC` - -*Syntax* - -[source,esql] ----- -DATE_TRUNC(interval, date) ----- - -*Parameters* - -`interval`:: -Interval, expressed using the <>. If `null`, the function returns `null`. - -`date`:: -Date expression. If `null`, the function returns `null`. 
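The truncation semantics being documented here, snapping a timestamp down to the start of its interval, can be sanity-checked for fixed-size intervals with plain `java.time`. This is only a sketch; the ES|QL function itself delegates to Elasticsearch's `Rounding`, which also handles calendar intervals such as months and quarters:

[source,java]
----
import java.time.Duration;
import java.time.Instant;

public class TruncSketch {
    /** Rounds epoch millis down to the nearest multiple of a fixed-size interval (UTC). */
    static long truncate(long epochMillis, Duration interval) {
        long size = interval.toMillis();
        return Math.floorDiv(epochMillis, size) * size;
    }

    public static void main(String[] args) {
        long ts = Instant.parse("2023-02-17T10:25:33.38Z").toEpochMilli();
        System.out.println(Instant.ofEpochMilli(truncate(ts, Duration.ofHours(3))));    // 2023-02-17T09:00:00Z
        System.out.println(Instant.ofEpochMilli(truncate(ts, Duration.ofMinutes(15)))); // 2023-02-17T10:15:00Z
    }
}
----

The two expected values match the `ofDuration` cases added to `DateTruncTests` later in this patch.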
- -*Description* - -Rounds down a date to the closest interval. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* @@ -32,10 +10,8 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] |=== - Combine `DATE_TRUNC` with <> to create date histograms. For example, the number of hires per year: - [source.merge.styled,esql] ---- include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] @@ -44,9 +20,7 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] |=== - Or an hourly error rate: - [source.merge.styled,esql] ---- include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] @@ -55,3 +29,4 @@ include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] |=== include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] |=== + diff --git a/docs/reference/esql/functions/layout/date_trunc.asciidoc b/docs/reference/esql/functions/layout/date_trunc.asciidoc new file mode 100644 index 0000000000000..0bd9ce4b4dbe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_trunc.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_trunc]] +=== `DATE_TRUNC` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_trunc.svg[Embedded,opts=inline] + +include::../parameters/date_trunc.asciidoc[] +include::../description/date_trunc.asciidoc[] +include::../types/date_trunc.asciidoc[] +include::../examples/date_trunc.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_trunc.asciidoc b/docs/reference/esql/functions/parameters/date_trunc.asciidoc new file mode 100644 index 0000000000000..19f7cb6cd7c74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_trunc.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`interval`:: +Interval; expressed using the timespan literal syntax. + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/signature/date_trunc.svg b/docs/reference/esql/functions/signature/date_trunc.svg new file mode 100644 index 0000000000000..c82cd04ed5c88 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_trunc.svg @@ -0,0 +1 @@ +DATE_TRUNC(interval,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc new file mode 100644 index 0000000000000..8df45cfef54a8 --- /dev/null +++ b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
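A detail behind the two interval types in the table below: `time_duration` carries a fixed-length `Duration`, while `date_period` carries a calendar `Period`, and `createRounding` in this patch accepts a `Period` only when exactly one of its units is non-zero. That validation, isolated into a runnable sketch that mirrors the code added further down:

[source,java]
----
import java.time.Period;

public class PeriodUnitsSketch {
    /** Mirrors the check in DateTrunc.createRounding: exactly one calendar unit may be set. */
    static void requireSingleUnit(Period period) {
        long nonZeroUnits = period.getUnits().stream().filter(unit -> period.get(unit) != 0).count();
        if (nonZeroUnits != 1) {
            throw new IllegalArgumentException("Time interval with multiple periods is not supported");
        }
    }

    public static void main(String[] args) {
        requireSingleUnit(Period.ofMonths(3)); // fine: quarterly rounding
        requireSingleUnit(Period.of(0, 1, 1)); // throws: months and days are both set
    }
}
----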
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +interval | date | result +date_period | datetime | datetime +time_duration | datetime | datetime +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 33b61c95ed0ed..d344b50c0364f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -22,7 +22,7 @@ synopsis:keyword "long date_extract(datePart:keyword|text, date:date)" "keyword date_format(?dateFormat:keyword|text, date:date)" "date date_parse(?datePattern:keyword|text, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" +"date date_trunc(interval:date_period|time_duration, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" @@ -132,7 +132,7 @@ date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era., Date expression] date_format |[dateFormat, date] |["keyword|text", date] |[A valid date pattern, Date expression] date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[A valid date pattern, A string representing a date] -date_trunc |[interval, date] |[keyword, date] |[Interval; expressed using the timespan literal syntax., Date expression] +date_trunc |[interval, date] |["date_period|time_duration", date] |[Interval; expressed using the timespan literal syntax., Date expression] e |null |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] floor |number |"double|integer|long|unsigned_long" |[""] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java index 0cee9d2c53cde..7c9a788eed36c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -18,6 +18,12 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface Example { + + /** + * The description that will appear before the example + */ + String description() default ""; + /** * The test file that contains the example. 
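Since the new `description()` element declares a default of the empty string, every existing `@Example` usage keeps compiling unchanged and the doc generator can simply skip empty descriptions. A small self-contained sketch of how annotation elements with defaults are read back via reflection, using a hypothetical annotation rather than the ES|QL one:

[source,java]
----
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

public class AnnotationSketch {
    @Retention(RetentionPolicy.RUNTIME)
    @interface Example {
        String description() default ""; // optional: callers may omit it
        String file();
    }

    @Example(file = "date") // uses the default, empty description
    static class Plain {}

    @Example(description = "Or an hourly error rate:", file = "conditional")
    static class Described {}

    public static void main(String[] args) {
        for (Class<?> c : new Class<?>[] { Plain.class, Described.class }) {
            Example e = c.getAnnotation(Example.class);
            System.out.println(c.getSimpleName() + ": file=" + e.file() + ", description=\"" + e.description() + "\"");
        }
    }
}
----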
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0f35b95a287ad..39ad0351b199f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -34,14 +35,26 @@ public class DateTrunc extends BinaryDateTimeFunction implements EvaluatorMapper { - @FunctionInfo(returnType = "date", description = "Rounds down a date to the closest interval.") + @FunctionInfo( + returnType = "date", + description = "Rounds down a date to the closest interval.", + examples = { + @Example(file = "date", tag = "docsDateTrunc"), + @Example( + description = "Combine `DATE_TRUNC` with <> to create date histograms. For\n" + + "example, the number of hires per year:", + file = "date", + tag = "docsDateTruncHistogram" + ), + @Example(description = "Or an hourly error rate:", file = "conditional", tag = "docsCaseHourlyErrorRate") } + ) public DateTrunc( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial @Param( name = "interval", - type = { "keyword" }, + type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, @Param(name = "date", type = { "date" }, description = "Date expression") Expression field @@ -55,8 +68,8 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - return isDate(timestampField(), sourceText(), FIRST).and( - isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration") + return isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), FIRST, "dateperiod", "timeduration").and( + isDate(timestampField(), sourceText(), SECOND) ); } @@ -105,7 +118,7 @@ private static Rounding.Prepared createRounding(final Period period, final ZoneI long periods = period.getUnits().stream().filter(unit -> period.get(unit) != 0).count(); if (periods != 1) { - throw new IllegalArgumentException("Time interval is not supported"); + throw new IllegalArgumentException("Time interval with multiple periods is not supported"); } final Rounding.Builder rounding; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index aedc789620480..f4ecf38915a29 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1118,36 +1118,36 @@ public void testDateParseOnIntPattern() { public void testDateTruncOnInt() { verifyUnsupported(""" from test - | eval date_trunc("1M", int) - """, "first argument of [date_trunc(\"1M\", int)] must be [datetime], found value [int] type [integer]"); + | eval date_trunc(1 month, int) + """, "second argument of [date_trunc(1 month, int)] must be [datetime], found value [int] type [integer]"); } public void testDateTruncOnFloat() { verifyUnsupported(""" from test - | eval date_trunc("1M", float) - """, "first argument of [date_trunc(\"1M\", float)] must be [datetime], found value [float] type [double]"); + | eval date_trunc(1 month, float) + """, "second argument of [date_trunc(1 month, float)] must be [datetime], found value [float] type [double]"); } public void testDateTruncOnText() { verifyUnsupported(""" from test - | eval date_trunc("1M", keyword) - """, "first argument of [date_trunc(\"1M\", keyword)] must be [datetime], found value [keyword] type [keyword]"); + | eval date_trunc(1 month, keyword) + """, "second argument of [date_trunc(1 month, keyword)] must be [datetime], found value [keyword] type [keyword]"); } public void testDateTruncWithNumericInterval() { verifyUnsupported(""" from test | eval date_trunc(1, date) - """, "second argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); + """, "first argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]"); } public void testDateTruncWithDateInterval() { verifyUnsupported(""" from test | eval date_trunc(date, date) - """, "second argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]"); + """, "first argument of [date_trunc(date, date)] must be [dateperiod or timeduration], found value [date] type [datetime]"); } // check field declaration is validated even across duplicated declarations diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b97622f28520c..0772b03bf3210 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -618,7 +618,7 @@ public static void testFunctionInfo() { for (Map.Entry, DataType> entry : signatures.entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { - typesFromSignature.get(i).add(types.get(i).esType()); + typesFromSignature.get(i).add(signatureType(types.get(i))); } returnFromSignature.add(entry.getValue().esType()); } @@ -637,6 +637,10 @@ public static void testFunctionInfo() { } + private static String signatureType(DataType type) { + return type.esType() != null ? type.esType() : type.typeName(); + } + /** * Adds cases with {@code null} and asserts that the result is {@code null}. *

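The `DateTruncTests` rewrite below moves the class onto the shared parameterized machinery: each supported signature becomes a lazy supplier of typed inputs plus an expected result, and helpers such as `anyNullIsNull` and `errorsForCasesWithoutExamples` layer generated null and type-error cases on top. The shape of that pattern, reduced to plain Java with illustrative stand-in types:

[source,java]
----
import java.util.List;
import java.util.function.Supplier;

public class SupplierPatternSketch {
    // Illustrative stand-ins for the ESQL test framework's TypedData / TestCase types.
    record TypedData(Object value, String type) {}
    record TestCase(List<TypedData> inputs, Object expected) {}

    public static void main(String[] args) {
        long ts = 1676629533380L; // 2023-02-17T10:25:33.38Z, the fixed timestamp the real tests use
        // Each supplier is lazy, so a framework can expand the list with generated
        // null-input and wrong-type variants before any values are materialized.
        List<Supplier<TestCase>> suppliers = List.of(
            () -> new TestCase(
                List.of(new TypedData(java.time.Period.ofDays(1), "date_period"),
                        new TypedData(ts, "datetime")),
                1676592000000L), // 2023-02-17T00:00:00Z
            () -> new TestCase(
                List.of(new TypedData(java.time.Duration.ofHours(1), "time_duration"),
                        new TypedData(ts, "datetime")),
                1676628000000L)  // 2023-02-17T10:00:00Z
        );
        for (Supplier<TestCase> supplier : suppliers) {
            TestCase testCase = supplier.get();
            System.out.println(testCase.inputs() + " -> " + testCase.expected());
        }
    }
}
----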
    @@ -894,6 +898,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List 0) { + builder.append(example.description()); + builder.append("\n"); + } builder.append(""" [source.merge.styled,esql] ---- diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 4f897c47d73b8..98fbff6a816c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -7,28 +7,56 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.Rounding; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.ql.expression.FieldAttribute; -import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.DateEsField; -import org.elasticsearch.xpack.ql.type.EsField; import java.time.Duration; import java.time.Instant; import java.time.Period; -import java.util.Collections; -import java.util.Map; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.createRounding; import static org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc.process; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateTruncTests extends AbstractFunctionTestCase { -public class DateTruncTests extends ESTestCase { + public DateTruncTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + long ts = toMillis("2023-02-17T10:25:33.38Z"); + List suppliers = List.of( + ofDatePeriod(Period.ofDays(1), ts, "2023-02-17T00:00:00.00Z"), + ofDatePeriod(Period.ofMonths(1), ts, "2023-02-01T00:00:00.00Z"), + ofDatePeriod(Period.ofYears(1), ts, "2023-01-01T00:00:00.00Z"), + ofDatePeriod(Period.ofDays(10), ts, "2023-02-12T00:00:00.00Z"), + // 7 days period should return weekly rounding + ofDatePeriod(Period.ofDays(7), ts, "2023-02-13T00:00:00.00Z"), + // 3 months period should return quarterly + ofDatePeriod(Period.ofMonths(3), ts, "2023-01-01T00:00:00.00Z"), + ofDuration(Duration.ofHours(1), ts, "2023-02-17T10:00:00.00Z"), + ofDuration(Duration.ofMinutes(1), ts, "2023-02-17T10:25:00.00Z"), + ofDuration(Duration.ofSeconds(1), ts, "2023-02-17T10:25:33.00Z"), + ofDuration(Duration.ofHours(3), ts, "2023-02-17T09:00:00.00Z"), + ofDuration(Duration.ofMinutes(15), ts, "2023-02-17T10:15:00.00Z"), + ofDuration(Duration.ofSeconds(30), ts, "2023-02-17T10:25:30.00Z"), + randomSecond() + ); + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } public void testCreateRoundingDuration() { Rounding.Prepared rounding; @@ -71,7 +99,7 @@ public void testCreateRoundingPeriod() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); e = expectThrows(IllegalArgumentException.class, () -> createRounding(Period.of(0, 1, 1))); - assertThat(e.getMessage(), containsString("Time interval is not supported")); + assertThat(e.getMessage(), containsString("Time interval with multiple periods is not supported")); rounding = createRounding(Period.ofDays(1)); assertEquals(1, rounding.roundingSize(Rounding.DateTimeUnit.DAY_OF_MONTH), 0d); @@ -103,25 +131,6 @@ public void testCreateRoundingNullInterval() { public void testDateTruncFunction() { long ts = toMillis("2023-02-17T10:25:33.38Z"); - assertEquals(toMillis("2023-02-17T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(1)))); - assertEquals(toMillis("2023-02-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(1)))); - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofYears(1)))); - - assertEquals(toMillis("2023-02-12T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(10)))); - // 7 days period should return weekly rounding - assertEquals(toMillis("2023-02-13T00:00:00.00Z"), process(ts, createRounding(Period.ofDays(7)))); - // 3 months period should return quarterly - assertEquals(toMillis("2023-01-01T00:00:00.00Z"), process(ts, createRounding(Period.ofMonths(3)))); - - assertEquals(toMillis("2023-02-17T10:00:00.00Z"), process(ts, createRounding(Duration.ofHours(1)))); - assertEquals(toMillis("2023-02-17T10:25:00.00Z"), process(ts, createRounding(Duration.ofMinutes(1)))); - assertEquals(toMillis("2023-02-17T10:25:33.00Z"), process(ts, createRounding(Duration.ofSeconds(1)))); - - assertEquals(toMillis("2023-02-17T09:00:00.00Z"), process(ts, createRounding(Duration.ofHours(3)))); - assertEquals(toMillis("2023-02-17T10:15:00.00Z"), process(ts, createRounding(Duration.ofMinutes(15)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - assertEquals(toMillis("2023-02-17T10:25:30.00Z"), process(ts, createRounding(Duration.ofSeconds(30)))); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> process(ts, createRounding(Period.ofDays(-1)))); assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); @@ -129,36 +138,71 @@ public void testDateTruncFunction() { assertThat(e.getMessage(), containsString("Zero or negative time interval is not supported")); } - private static long toMillis(String timestamp) { - return Instant.parse(timestamp).toEpochMilli(); + private static TestCaseSupplier ofDatePeriod(Period period, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.DATE_PERIOD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(period, EsqlDataTypes.DATE_PERIOD, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); + } + + private static TestCaseSupplier ofDuration(Duration duration, long value, String expectedDate) { + return new TestCaseSupplier( + List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), + () -> new 
TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(duration, EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(expectedDate)) + ) + ); } - public void testSerialization() { - var dateTrunc = new DateTrunc(Source.EMPTY, randomDateIntervalLiteral(), randomDateField()); - SerializationTestUtils.assertSerialization(dateTrunc); + private static TestCaseSupplier randomSecond() { + return new TestCaseSupplier("random second", List.of(EsqlDataTypes.TIME_DURATION, DataTypes.DATETIME), () -> { + String dateFragment = randomIntBetween(2000, 2050) + + "-" + + pad(randomIntBetween(1, 12)) + + "-" + + pad(randomIntBetween(1, 28)) + + "T" + + pad(randomIntBetween(0, 23)) + + ":" + + pad(randomIntBetween(0, 59)) + + ":" + + pad(randomIntBetween(0, 59)); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Duration.ofSeconds(1), EsqlDataTypes.TIME_DURATION, "interval"), + new TestCaseSupplier.TypedData(toMillis(dateFragment + ".38Z"), DataTypes.DATETIME, "date") + ), + "DateTruncEvaluator[date=Attribute[channel=1], interval=Attribute[channel=0]]", + DataTypes.DATETIME, + equalTo(toMillis(dateFragment + ".00Z")) + ); + }); } - private static FieldAttribute randomDateField() { - String fieldName = randomAlphaOfLength(randomIntBetween(1, 25)); - String dateName = randomAlphaOfLength(randomIntBetween(1, 25)); - boolean hasDocValues = randomBoolean(); - if (randomBoolean()) { - return new FieldAttribute(Source.EMPTY, fieldName, new EsField(dateName, DataTypes.DATETIME, Map.of(), hasDocValues)); - } else { - return new FieldAttribute(Source.EMPTY, fieldName, DateEsField.dateEsField(dateName, Collections.emptyMap(), hasDocValues)); - } + private static String pad(int i) { + return i > 9 ? "" + i : "0" + i; + } + + private static long toMillis(String timestamp) { + return Instant.parse(timestamp).toEpochMilli(); } - private static Literal randomDateIntervalLiteral() { - Duration duration = switch (randomInt(5)) { - case 0 -> Duration.ofNanos(randomIntBetween(1, 100000)); - case 1 -> Duration.ofMillis(randomIntBetween(1, 1000)); - case 2 -> Duration.ofSeconds(randomIntBetween(1, 1000)); - case 3 -> Duration.ofMinutes(randomIntBetween(1, 1000)); - case 4 -> Duration.ofHours(randomIntBetween(1, 100)); - case 5 -> Duration.ofDays(randomIntBetween(1, 60)); - default -> throw new AssertionError(); - }; - return new Literal(Source.EMPTY, duration, EsqlDataTypes.TIME_DURATION); + @Override + protected Expression build(Source source, List args) { + return new DateTrunc(source, args.get(0), args.get(1)); } } From bdc98737862ed5da0fb7a9a92af386dc583b48f0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Apr 2024 10:28:08 +0100 Subject: [PATCH 099/173] Remove unused `ThreadPool.Names#SAME` (#107249) `SAME` is a distinguished threadpool name that callers could use to obtain a special `ExecutorService` that runs tasks immediately, directly on the calling thread. In fact there are no callers that use this name any more, so we can remove it and all the associated special handling. 
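For context on the replacement used throughout the diffs below, a direct executor of the kind `SAME` used to name is conceptually just an executor that runs each task inline on the submitting thread, which is how `EsExecutors.DIRECT_EXECUTOR_SERVICE` behaves. A one-line JDK illustration:

[source,java]
----
import java.util.concurrent.Executor;

public class DirectExecutorSketch {
    // The essence of a direct executor: no queue, no fork, just run on the caller's thread.
    static final Executor DIRECT = Runnable::run;

    public static void main(String[] args) {
        System.out.println("caller: " + Thread.currentThread().getName());
        DIRECT.execute(() -> System.out.println("task:   " + Thread.currentThread().getName()));
        // Both lines print the same thread name; nothing is queued or forked.
    }
}
----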
Relates #106279 --- .../elasticsearch/threadpool/ThreadPool.java | 21 +++++++------------ .../threadpool/ESThreadPoolTestCase.java | 7 +++---- .../UpdateThreadPoolSettingsTests.java | 10 ++------- .../concurrent/DeterministicTaskQueue.java | 2 +- .../DeterministicTaskQueueTests.java | 16 -------------- .../MlDailyMaintenanceServiceIT.java | 3 --- .../MlInitializationServiceIT.java | 4 +--- 7 files changed, 14 insertions(+), 49 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 507eff05780b8..9679bc88319d0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.node.Node; import org.elasticsearch.node.ReportingService; import org.elasticsearch.telemetry.metric.Instrument; @@ -64,7 +65,6 @@ public class ThreadPool implements ReportingService, Scheduler { private static final Logger logger = LogManager.getLogger(ThreadPool.class); public static class Names { - public static final String SAME = "same"; public static final String GENERIC = "generic"; public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String GET = "get"; @@ -99,9 +99,13 @@ public static class Names { public static final String THREAD_POOL_METRIC_NAME_REJECTED = ".threads.rejected.total"; public enum ThreadPoolType { + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 DIRECT("direct"), FIXED("fixed"), - FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), // TODO: remove in 9.0 + @Deprecated(forRemoval = true) + @UpdateForV9 // no longer used, remove in v9 + FIXED_AUTO_QUEUE_SIZE("fixed_auto_queue_size"), SCALING("scaling"); private final String type; @@ -127,7 +131,6 @@ public static ThreadPoolType fromType(String type) { } public static final Map THREAD_POOL_TYPES = Map.ofEntries( - entry(Names.SAME, ThreadPoolType.DIRECT), entry(Names.GENERIC, ThreadPoolType.SCALING), entry(Names.GET, ThreadPoolType.FIXED), entry(Names.ANALYZE, ThreadPoolType.FIXED), @@ -335,16 +338,10 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex executors.put(entry.getKey(), executorHolder); } - executors.put(Names.SAME, new ExecutorHolder(EsExecutors.DIRECT_EXECUTOR_SERVICE, new Info(Names.SAME, ThreadPoolType.DIRECT))); this.executors = Map.copyOf(executors); this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v))); this.instruments = instruments; - final List infos = executors.values() - .stream() - .filter(holder -> holder.info.getName().equals("same") == false) - .map(holder -> holder.info) - .toList(); - this.threadPoolInfo = new ThreadPoolInfo(infos); + this.threadPoolInfo = new ThreadPoolInfo(executors.values().stream().map(holder -> holder.info).toList()); this.scheduler = Scheduler.initScheduler(settings, "scheduler"); this.slowSchedulerWarnThresholdNanos = SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING.get(settings).nanos(); this.cachedTimeThread = new CachedTimeThread( @@ -481,10 +478,6 @@ public ThreadPoolStats stats() { List stats = new ArrayList<>(); for (ExecutorHolder holder : executors.values()) { final String name = holder.info.getName(); - // no need to have info on "same" 
thread pool - if ("same".equals(name)) { - continue; - } int threads = -1; int queue = -1; int active = -1; diff --git a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java index 40115e1402495..4f7d900f7cdb8 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java @@ -20,8 +20,7 @@ protected final ThreadPool.Info info(final ThreadPool threadPool, final String n return info; } } - assert "same".equals(name); - return null; + return fail(null, "unknown threadpool name: " + name); } protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) { @@ -30,10 +29,10 @@ protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final S return stats; } } - throw new IllegalArgumentException(name); + return fail(null, "unknown threadpool name: " + name); } - protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) throws InterruptedException { + protected final void terminateThreadPoolIfNeeded(final ThreadPool threadPool) { if (threadPool != null) { terminate(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 5644e0b613651..b68f3ef76bbac 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -29,7 +29,7 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { - public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { + public void testCorrectThreadPoolTypePermittedInSettings() { String threadPoolName = randomThreadPoolName(); ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); ThreadPool threadPool = null; @@ -41,13 +41,7 @@ public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedExc .build(), MeterRegistry.NOOP ); - ThreadPool.Info info = info(threadPool, threadPoolName); - if (ThreadPool.Names.SAME.equals(threadPoolName)) { - assertNull(info); // we don't report on the "same" thread pool - } else { - // otherwise check we have the expected type - assertEquals(info.getThreadPoolType(), correctThreadPoolType); - } + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), correctThreadPoolType); } finally { terminateThreadPoolIfNeeded(threadPool); } diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java index e89a6c8a84bf7..1fac5a9917807 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueue.java @@ -379,7 +379,7 @@ public ExecutorService generic() { @Override public ExecutorService executor(String name) { - return Names.SAME.equals(name) ? 
EsExecutors.DIRECT_EXECUTOR_SERVICE : forkingExecutor; + return forkingExecutor; } @Override diff --git a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java index f4677dc603e64..0e79dfa6e1e79 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/util/concurrent/DeterministicTaskQueueTests.java @@ -443,20 +443,4 @@ public void testThreadPoolSchedulesPeriodicFutureTasks() { assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); } - public void testSameExecutor() { - final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(); - final ThreadPool threadPool = taskQueue.getThreadPool(); - final AtomicBoolean executed = new AtomicBoolean(false); - final AtomicBoolean executedNested = new AtomicBoolean(false); - threadPool.generic().execute(() -> { - final var executor = threadPool.executor(ThreadPool.Names.SAME); - assertSame(EsExecutors.DIRECT_EXECUTOR_SERVICE, executor); - executor.execute(() -> assertTrue(executedNested.compareAndSet(false, true))); - assertThat(executedNested.get(), is(true)); - assertTrue(executed.compareAndSet(false, true)); - }); - taskQueue.runAllRunnableTasks(); - assertThat(executed.get(), is(true)); - } - } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java index 20ca6d8847d79..4fe3ed61114c3 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlDailyMaintenanceServiceIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.threadpool.ThreadPool; @@ -35,7 +34,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase { @@ -46,7 +44,6 @@ public class MlDailyMaintenanceServiceIT extends MlNativeAutodetectIntegTestCase public void setUpMocks() { jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); } public void testTriggerDeleteJobsInStateDeletingWithoutDeletionTask() throws InterruptedException { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java index 0a7cee96df145..30f84a97bcfb0 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlInitializationServiceIT.java @@ -39,13 +39,11 @@ public class MlInitializationServiceIT extends MlNativeAutodetectIntegTestCase { - private ThreadPool threadPool; private MlInitializationService mlInitializationService; @Before public void setUpMocks() { - threadPool = mock(ThreadPool.class); - when(threadPool.executor(ThreadPool.Names.SAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + final var threadPool = mock(ThreadPool.class); when(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); MlDailyMaintenanceService mlDailyMaintenanceService = mock(MlDailyMaintenanceService.class); ClusterService clusterService = mock(ClusterService.class); From 51aa92090dd01139f8071df2d8c175e50a2d9c0d Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 9 Apr 2024 12:04:43 +0200 Subject: [PATCH 100/173] ES|QL: More deterministic tests (#107248) --- .../elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index ab288de4ad27d..5aa48234cb11a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -478,7 +478,8 @@ public void testWarningHeadersOnFailedConversions() throws IOException { bulkLoadTestData(count); Request request = prepareRequest(SYNC); - var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; + var query = fromIndex() + + " | sort integer asc | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); @@ -493,7 +494,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { int expectedWarnings = Math.min(count / 2, 20); var warnings = response.getWarnings(); assertThat(warnings.size(), is(1 + expectedWarnings)); - var firstHeader = "Line 1:36: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + var firstHeader = "Line 1:55: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + "treating result as null. 
Only first 20 failures recorded."; assertThat(warnings.get(0), containsString(firstHeader)); for (int i = 1; i <= expectedWarnings; i++) { From c1ef120fe211544c96a79b1540939780764460c7 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 9 Apr 2024 14:15:54 +0200 Subject: [PATCH 101/173] [Docs][ESQL] Make functions reference more digestible (#107258) * [Docs][ESQL] Make functions reference more digestible * Remove redundant links --- .../esql/esql-functions-operators.asciidoc | 53 +++++++++++++++---- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc index a1ad512fbe512..ddc077f3b8ff8 100644 --- a/docs/reference/esql/esql-functions-operators.asciidoc +++ b/docs/reference/esql/esql-functions-operators.asciidoc @@ -1,40 +1,71 @@ [[esql-functions-operators]] === {esql} functions and operators - ++++ Functions and operators ++++ {esql} provides a comprehensive set of functions and operators for working with data. -The functions are divided into the following categories: +The reference documentation is divided into the following categories: [[esql-functions]] -<>:: +==== Functions overview + +.*Aggregate functions* +[%collapsible] +==== include::functions/aggregation-functions.asciidoc[tag=agg_list] +==== -<>:: +.*Math functions* +[%collapsible] +==== include::functions/math-functions.asciidoc[tag=math_list] +==== -<>:: +.*String functions* +[%collapsible] +==== include::functions/string-functions.asciidoc[tag=string_list] +==== -<>:: +.*Date and time functions* +[%collapsible] +==== include::functions/date-time-functions.asciidoc[tag=date_list] +==== -<>:: +.*Spatial functions* +[%collapsible] +==== include::functions/spatial-functions.asciidoc[tag=spatial_list] +==== -<>:: +.*Type conversion functions* +[%collapsible] +==== include::functions/type-conversion-functions.asciidoc[tag=type_list] +==== -<>:: +.*Conditional functions and expressions* +[%collapsible] +==== include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list] +==== -<>:: +.*Multi value functions* +[%collapsible] +==== include::functions/mv-functions.asciidoc[tag=mv_list] +==== + +[[esql-operators-overview]] +==== Operators overview -<>:: +.*Operators* +[%collapsible] +==== include::functions/operators.asciidoc[tag=op_list] +==== include::functions/aggregation-functions.asciidoc[] include::functions/math-functions.asciidoc[] From 3dccc66c2cd79418c8473076ef3721ea3b8048eb Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 9 Apr 2024 14:16:24 +0200 Subject: [PATCH 102/173] [DOCS][ESQL] Rename _Learning ESQL_ to _ESQL reference_ (#107259) --- docs/reference/esql/esql-language.asciidoc | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index e4c873457b21b..77f5e79753fdd 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,11 +1,10 @@ [[esql-language]] -== Learning {esql} - +== {esql} reference ++++ -Learning {esql} +{esql} reference ++++ -Detailed information about the {esql} language: +Detailed reference documentation for the {esql} language: * <> * <> From 73e8984164b7e25d2360dbe7f2eea4057b7513e1 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Tue, 9 Apr 2024 08:27:47 -0400 Subject: [PATCH 103/173] Add documentation to 
thread pool and executor code (#106208) --- .../s3/RepositoryCredentialsTests.java | 2 +- .../repositories/s3/S3RepositoryTests.java | 2 +- .../repositories/url/URLRepositoryTests.java | 2 +- .../common/util/concurrent/EsExecutors.java | 3 ++ .../blobstore/BlobStoreRepository.java | 8 ++--- .../threadpool/FixedExecutorBuilder.java | 3 ++ .../threadpool/ScalingExecutorBuilder.java | 4 +++ .../elasticsearch/threadpool/ThreadPool.java | 36 +++++++++++++++++++ .../BlobStoreRepositoryRestoreTests.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 2 +- ...archableSnapshotsPrewarmingIntegTests.java | 4 +-- ...SnapshotRecoveryStateIntegrationTests.java | 2 +- .../SearchableSnapshotDirectoryTests.java | 2 +- 13 files changed, 59 insertions(+), 13 deletions(-) diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index cf3bc21526bf6..13e582598a2d2 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -267,7 +267,7 @@ protected S3Repository createRepository( ) { return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, s3RepositoriesMetrics) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 50470ec499ef6..ff61504d6c525 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -132,7 +132,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { S3RepositoriesMetrics.NOOP ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 00abf1e77fd57..a02bff59988d8 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -43,7 +43,7 @@ private URLRepository createRepository(Settings baseSettings, RepositoryMetadata mock(URLHttpClient.Factory.class) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 5fcb4684d3f8d..14c1d1e9ef6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -33,6 +33,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +/** + * A collection of static methods to help create different ES Executor types. + */ public class EsExecutors { // although the available processors may technically change, for node sizing we use the number available at launch diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 41e849b4d2ebd..5a33a958646df 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -680,7 +680,7 @@ protected BlobStore getBlobStore() { * maintains single lazy instance of {@link BlobContainer} */ protected BlobContainer blobContainer() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (lifecycle.started() == false) { throw notStartedException(); @@ -705,7 +705,7 @@ protected BlobContainer blobContainer() { * Public for testing. */ public BlobStore blobStore() { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); BlobStore store = blobStore.get(); if (store == null) { @@ -1994,7 +1994,7 @@ public long getRestoreThrottleTimeInNanos() { return restoreRateLimitingTimeInNanos.count(); } - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // The Stateless plugin adds custom thread pools for object store operations assert ThreadPool.assertCurrentThreadPool( ThreadPool.Names.SNAPSHOT, @@ -3539,7 +3539,7 @@ public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotI @Override public void verify(String seed, DiscoveryNode localNode) { - assertSnapshotOrGenericThread(); + assertSnapshotOrStatelessPermittedThreadPool(); if (isReadOnly()) { try { latestIndexBlobId(); diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 9668228ac0ec3..544b085a7006d 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -24,6 +24,9 @@ /** * A builder for fixed executors. + * + * Builds an Executor with a static number of threads, as opposed to {@link ScalingExecutorBuilder} that dynamically scales the number of + * threads in the pool up and down based on request load. */ public final class FixedExecutorBuilder extends ExecutorBuilder { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java index 07504bc5f9d2e..29a7d5df08b7b 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ScalingExecutorBuilder.java @@ -24,6 +24,10 @@ /** * A builder for scaling executors. + * + * The {@link #build} method will instantiate a java {@link ExecutorService} thread pool that starts with the specified minimum number of + * threads and then scales up to the specified max number of threads as needed for excess work, scaling back when the burst of activity + * stops. 
As opposed to the {@link FixedExecutorBuilder} that keeps a fixed number of threads alive. */ public final class ScalingExecutorBuilder extends ExecutorBuilder { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 9679bc88319d0..ceda140827527 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -60,12 +60,28 @@ import static java.util.Map.entry; import static org.elasticsearch.core.Strings.format; +/** + * Manages all the Java thread pools we create. {@link Names} contains a list of the thread pools, but plugins can dynamically add more + * thread pools to instantiate. + */ public class ThreadPool implements ReportingService, Scheduler { private static final Logger logger = LogManager.getLogger(ThreadPool.class); + /** + * List of names that identify Java thread pools that are created in {@link ThreadPool#ThreadPool}. + */ public static class Names { + /** + * All the tasks that do not relate to the purpose of one of the other thread pools should use this thread pool. Try to pick one of + * the other more specific thread pools where possible. + */ public static final String GENERIC = "generic"; + /** + * Important management tasks that keep the cluster from falling apart. + * This thread pool ensures cluster coordination tasks do not get blocked by less critical tasks and can continue to make progress. + * This thread pool also defaults to a single thread, reducing contention on the Coordinator mutex. + */ public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String GET = "get"; public static final String ANALYZE = "analyze"; @@ -75,6 +91,10 @@ public static class Names { public static final String SEARCH_COORDINATION = "search_coordination"; public static final String AUTO_COMPLETE = "auto_complete"; public static final String SEARCH_THROTTLED = "search_throttled"; + /** + * Cluster management tasks. Tasks that manage data, and tasks that report on cluster health via statistics etc. + * Not a latency sensitive thread pool: some tasks may time be long-running; and the thread pool size is limited / relatively small. + */ public static final String MANAGEMENT = "management"; public static final String FLUSH = "flush"; public static final String REFRESH = "refresh"; @@ -199,6 +219,13 @@ public Collection builders() { Setting.Property.NodeScope ); + /** + * Defines and builds the many thread pools delineated in {@link Names}. + * + * @param settings + * @param meterRegistry + * @param customBuilders a list of additional thread pool builders that were defined elsewhere (like a Plugin). + */ @SuppressWarnings({ "rawtypes", "unchecked" }) public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final ExecutorBuilder... customBuilders) { assert Node.NODE_NAME_SETTING.exists(settings); @@ -327,6 +354,7 @@ public ThreadPool(final Settings settings, MeterRegistry meterRegistry, final Ex threadContext = new ThreadContext(settings); + // Now that all the thread pools have been defined, actually build them. 
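+        // (Illustrative aside, with a hypothetical pool name: a plugin that passed a
+        // FixedExecutorBuilder for "my_pool" via customBuilders above will, once this
+        // loop runs, have a live executor reachable through threadPool.executor("my_pool").)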
final Map executors = new HashMap<>(); for (final Map.Entry entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); @@ -902,6 +930,11 @@ void check(long newAbsoluteMillis, long newRelativeNanos) { } } + /** + * Holds a thread pool and additional ES information ({@link Info}) about that Java thread pool ({@link ExecutorService}) instance. + * + * See {@link Names} for a list of thread pools, though there can be more dynamically added via plugins. + */ static class ExecutorHolder { private final ExecutorService executor; public final Info info; @@ -917,6 +950,9 @@ ExecutorService executor() { } } + /** + * The settings used to create a Java ExecutorService thread pool. + */ public static class Info implements Writeable, ToXContentFragment { private final String name; diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 327dc3d4f5fd0..0e4818701c5f5 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -227,7 +227,7 @@ private Repository createRepository() { new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually } }; diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index dafe994b502f0..185f4582e7377 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2130,7 +2130,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { recoverySettings ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo in the test thread } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index 0cf6cb93c865b..207df0faddd07 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -449,9 +449,9 @@ public Map getRepositories( (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { if (enabled.get()) { - super.assertSnapshotOrGenericThread(); + super.assertSnapshotOrStatelessPermittedThreadPool(); } } diff --git 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java index 6800dea01863a..4a15d00bc8168 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryStateIntegrationTests.java @@ -248,7 +248,7 @@ public Map getRepositories( "test-fs", (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // ignore } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index 9c36d7b762871..c54ead2bdbc45 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -609,7 +609,7 @@ private void testDirectories( ) { @Override - protected void assertSnapshotOrGenericThread() { + protected void assertSnapshotOrStatelessPermittedThreadPool() { // eliminate thread name check as we create repo manually on test/main threads } }; From 9ae414ebda41f7a6aeb2fbbdbeb5965ec2616df0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 09:39:41 -0400 Subject: [PATCH 104/173] ESQL: Merge randomize a test attribute (#107239) In ESQL we test our functions with values read directly from a `Page` and values that are modified by some other operation - called `floating`. This caused us some trouble when we were working on reference counting but we've mostly got this handled now. Our tests were running lots of cases in "floating" and "non-floating" versions. Replaces three such cases with randomization - so we have a 50/50 shot of floting on each test. We want to do this because ESQL's builds generate a *ton* of tests. Enough to upset gradle enterprise. This change cuts the number of test cases we run from 227811 to 159271. About a 30% reduction in test cases. The actual runtime of the tests doesn't change a ton. These tests are all fairly fast. 
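In sketch form, the change replaces each pair of generated read-mode methods with a single method that draws the mode at random (condensed from the diff below; the shared body is elided):

    // Before: two generated methods per scenario, doubling the case count.
    public final void testEvaluate() {
        testEvaluate(false);     // values read directly from the Page
    }
    public final void testEvaluateFloating() {
        testEvaluate(true);      // values read from an intermediate operator
    }

    // After: one method with a 50/50 shot at either read mode per run, so both
    // paths still get exercised across the suite at half the generated-case count.
    public final void testEvaluate() {
        boolean readFloating = randomBoolean();
        Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase);
        // ... unchanged assertions on the evaluated result ...
    }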
--- .../scalar/string/LocateNoStartEvaluator.java | 139 ++++++++++++++++++ .../function/AbstractFunctionTestCase.java | 81 ++-------- 2 files changed, 150 insertions(+), 70 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java new file mode 100644 index 0000000000000..947b1ecb49d0c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateNoStartEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Locate}. + * This class is generated. Do not edit it. 
+ */ +public final class LocateNoStartEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator str; + + private final EvalOperator.ExpressionEvaluator substr; + + private final DriverContext driverContext; + + public LocateNoStartEvaluator(Source source, EvalOperator.ExpressionEvaluator str, + EvalOperator.ExpressionEvaluator substr, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.str = str; + this.substr = substr; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock strBlock = (BytesRefBlock) str.eval(page)) { + try (BytesRefBlock substrBlock = (BytesRefBlock) substr.eval(page)) { + BytesRefVector strVector = strBlock.asVector(); + if (strVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + BytesRefVector substrVector = substrBlock.asVector(); + if (substrVector == null) { + return eval(page.getPositionCount(), strBlock, substrBlock); + } + return eval(page.getPositionCount(), strVector, substrVector).asBlock(); + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlock substrBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (substrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (substrBlock.getValueCount(p) != 1) { + if (substrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendInt(Locate.process(strBlock.getBytesRef(strBlock.getFirstValueIndex(p), strScratch), substrBlock.getBytesRef(substrBlock.getFirstValueIndex(p), substrScratch))); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector strVector, BytesRefVector substrVector) { + try(IntVector.Builder result = driverContext.blockFactory().newIntVectorBuilder(positionCount)) { + BytesRef strScratch = new BytesRef(); + BytesRef substrScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendInt(Locate.process(strVector.getBytesRef(p, strScratch), substrVector.getBytesRef(p, substrScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(str, substr); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory substr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory substr) { + this.source = source; + this.str = str; + this.substr = substr; + } + + @Override + public LocateNoStartEvaluator get(DriverContext 
context) { + return new LocateNoStartEvaluator(source, str.get(context), substr.get(context), context); + } + + @Override + public String toString() { + return "LocateNoStartEvaluator[" + "str=" + str + ", substr=" + substr + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 0772b03bf3210..12c141cc7c8a7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -242,18 +242,11 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe } public final void testEvaluate() { - testEvaluate(false); - } - - public final void testEvaluateFloating() { - testEvaluate(true); - } - - private void testEvaluate(boolean readFloating) { assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); + boolean readFloating = randomBoolean(); Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); if (testCase.getExpectedTypeError() != null) { assertTrue("expected unresolved", expression.typeResolved().unresolved()); @@ -296,47 +289,27 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { } /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read directly from the page. + * Evaluates a {@link Block} of values, all copied from the input pattern.. *
<p>
    * Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. *
</p>
    */ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, read from an intermediate operator. - *
<p>
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
    - */ - public final void testEvaluateBlockWithoutNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); } /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page. + * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, false); - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator. - */ - public final void testEvaluateBlockWithNullsFloating() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } /** * Evaluates a {@link Block} of values, all copied from the input pattern, - * read directly from the {@link Page}, using the - * {@link CrankyCircuitBreakerService} which fails randomly. + * using the {@link CrankyCircuitBreakerService} which fails randomly. *
<p>
    * Note that this'll sometimes be a {@link Vector} of values if the * input pattern contained only a single value. @@ -345,25 +318,7 @@ public final void testEvaluateBlockWithNullsFloating() { public final void testCrankyEvaluateBlockWithoutNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern, - * read from an intermediate operator, using the - * {@link CrankyCircuitBreakerService} which fails randomly. - *
<p>
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
</p>
    - */ - public final void testCrankyEvaluateBlockWithoutNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -371,27 +326,12 @@ public final void testCrankyEvaluateBlockWithoutNullsFloating() { /** * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read directly from the page, - * using the {@link CrankyCircuitBreakerService} which fails randomly. + * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. */ public final void testCrankyEvaluateBlockWithNulls() { assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, read from an intermediate operator, - * using the {@link CrankyCircuitBreakerService} which fails randomly. - */ - public final void testCrankyEvaluateBlockWithNullsFloating() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true, true); + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); } catch (CircuitBreakingException ex) { assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } @@ -404,9 +344,10 @@ protected Matcher allNullsMatcher() { return nullValue(); } - private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls, boolean readFloating) { + private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); From c50fcb95edc7ee9e9a687b56681098ec03518c4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 9 Apr 2024 16:55:33 +0200 Subject: [PATCH 105/173] [DOCS] Expands the list of possible values of the result parameter of the bulk API. (#107265) --- docs/reference/docs/bulk.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index a055c278b41d9..fc25e811807a9 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,9 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. -+ -This parameter is only returned for successful operations. +`updated`. 
Other valid values are `noop` and `not found`. `_shards`:: (object) From 62f19e3a0c6aa313a68f75900aaa1427fc854197 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 9 Apr 2024 17:01:32 +0200 Subject: [PATCH 106/173] Disable dynamic pruning on unindexed fields. (#107194) In order to know whether it can apply dynamic pruning using the points index, Lucene simply looks at whether a field has points. Unfortunately, this doesn't work well with our support for archive indexes, where numeric/date fields report that they have points, but they only support metadata operations on these points (min/max values, doc count), with the goal of quickly filtering out such archive indexes during the `can_match` phase. In order to address this discrepancy, dynamic pruning is now disabled when mappings report that a field is not indexed. This works because archive indexes automatically set `index: false` to make sure that filters run on doc values and not points. However, this is not a great fix as this increases our reliance on disabling dynamic pruning, which is currently marked as deprecated and scheduled for removal in the next Lucene major. So we'll need to either add it back to Lucene or find another approach. Closes #107168 --- .../mapper/extras/ScaledFloatFieldMapper.java | 8 +- .../mapper/murmur3/Murmur3FieldMapper.java | 2 +- .../fielddata/BooleanScriptFieldData.java | 5 ++ .../index/fielddata/DateScriptFieldData.java | 5 ++ .../fielddata/DoubleScriptFieldData.java | 5 ++ .../fielddata/IndexNumericFieldData.java | 13 +++- .../index/fielddata/LongScriptFieldData.java | 5 ++ .../plain/SortedDoublesIndexFieldData.java | 17 ++++- .../plain/SortedNumericIndexFieldData.java | 26 +++++-- .../index/mapper/BooleanFieldMapper.java | 4 +- .../index/mapper/DateFieldMapper.java | 3 +- .../index/mapper/NumberFieldMapper.java | 74 +++++++++++++++---- .../index/mapper/SeqNoFieldMapper.java | 2 +- .../index/mapper/VersionFieldMapper.java | 2 +- .../index/mapper/DateFieldTypeTests.java | 3 +- .../functionscore/FunctionScoreTests.java | 5 ++ .../sampler/DiversifiedSamplerTests.java | 3 +- .../fetch/subphase/FetchFieldsPhaseTests.java | 2 +- .../unsignedlong/UnsignedLongFieldMapper.java | 5 +- .../UnsignedLongIndexFieldData.java | 10 ++- 20 files changed, 160 insertions(+), 39 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index e2b932b01a516..09507ae926f44 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -340,7 +340,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); return new ScaledFloatIndexFieldData(scaledValues, scalingFactor, ScaledFloatDocValuesField::new); }; @@ -608,6 +609,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; // We don't know how to take advantage of the index with half floats anyway + } + @Override public NumericType getNumericType() { /* diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java 
b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 08a133bcb69c8..44f52105f64c9 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -80,7 +80,7 @@ public String typeName() { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new, isIndexed()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java index f3dcda813a39d..bc83f85edcf7d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class BooleanScriptLeafFieldData extends LeafLongFieldData { private final BooleanScriptDocValues booleanScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java index 1199583f89766..a9fdf72e23a31 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DateScriptFieldData.java @@ -90,6 +90,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DateScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java index 9307233f99161..e08a62eee8fb0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/DoubleScriptFieldData.java @@ -89,6 +89,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class DoubleScriptLeafFieldData extends LeafDoubleFieldData { private final DoubleScriptDocValues doubleScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java index dce94649e0088..391e9e285807f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java @@ -121,6 +121,7 @@ public final SortField sortField( case LONG: case DOUBLE: // longs, doubles and dates use the same type for doc-values and points. 
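+                    // Gate pruning on the mapping rather than on the mere presence of points:
+                    // archive indexes report points metadata (min/max, doc count) for can_match
+                    // but are mapped with index:false, so only isIndexed() fields may prune.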
+ sortField.setOptimizeSortWithPoints(isIndexed()); break; default: @@ -132,12 +133,18 @@ public final SortField sortField( } /** - * Does {@link #sortField} require a custom comparator because of the way - * the data is stored in doc values ({@code true}) or are the docs values - * stored such that they can be sorted without decoding ({@code false}). + * Should sorting use a custom comparator source vs. rely on a Lucene {@link SortField}. Using a Lucene {@link SortField} when possible + * is important because index sorting cannot be configured with a custom comparator, and because it gives better performance by + * dynamically pruning irrelevant hits. On the other hand, Lucene {@link SortField}s are less flexible and make stronger assumptions + * about how the data is indexed. Therefore, they cannot be used in all cases. */ protected abstract boolean sortRequiresCustomComparator(); + /** + * Return true if, and only if the field is indexed with points that match the content of doc values. + */ + protected abstract boolean isIndexed(); + @Override public final SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { return sortField(getNumericType(), missingValue, sortMode, nested, reverse); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java index 6be5eb9514918..85850b530a1de 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/LongScriptFieldData.java @@ -92,6 +92,11 @@ protected boolean sortRequiresCustomComparator() { return true; } + @Override + protected boolean isIndexed() { + return false; + } + public static class LongScriptLeafFieldData extends LeafLongFieldData { private final LongScriptDocValues longScriptDocValues; protected final ToScriptFieldFactory toScriptFieldFactory; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java index a1686344b9309..b7654dfa5569f 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedDoublesIndexFieldData.java @@ -42,22 +42,25 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedDoublesIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -65,18 +68,21 @@ public SortedDoublesIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final 
ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedDoublesIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint(); this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -94,6 +100,11 @@ protected boolean sortRequiresCustomComparator() { return numericType == NumericType.HALF_FLOAT; } + @Override + public boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return numericType; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java index c2507dd2470a5..9c871ac822625 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -42,26 +42,34 @@ public static class Builder implements IndexFieldData.Builder { private final NumericType numericType; private final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + private final boolean indexed; - public Builder(String name, NumericType numericType, ToScriptFieldFactory toScriptFieldFactory) { - this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory); + public Builder( + String name, + NumericType numericType, + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed + ) { + this(name, numericType, numericType.getValuesSourceType(), toScriptFieldFactory, indexed); } public Builder( String name, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.name = name; this.numericType = numericType; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory); + return new SortedNumericIndexFieldData(name, numericType, valuesSourceType, toScriptFieldFactory, indexed); } } @@ -69,18 +77,21 @@ public SortedNumericIndexFieldData build(IndexFieldDataCache cache, CircuitBreak protected final String fieldName; protected final ValuesSourceType valuesSourceType; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; public SortedNumericIndexFieldData( String fieldName, NumericType numericType, ValuesSourceType valuesSourceType, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.fieldName = fieldName; this.numericType = Objects.requireNonNull(numericType); assert this.numericType.isFloatingPoint() == false; this.valuesSourceType = valuesSourceType; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -98,6 +109,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override 
+ public boolean isIndexed() { + return indexed; + } + @Override protected XFieldComparatorSource dateComparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { if (numericType == NumericType.DATE_NANOSECONDS) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 968c48abc54d8..f07cd1cc32076 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -276,7 +276,9 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext } if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new); + // boolean fields are indexed, but not with points + boolean indexed = false; + return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new, indexed); } if (operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1b926734c1713..3092ed1e827df 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -795,7 +795,8 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext return new SortedNumericIndexFieldData.Builder( name(), resolution.numericType(), - resolution.getDefaultToScriptFieldFactory() + resolution.getDefaultToScriptFieldFactory(), + isIndexed() ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 1f7a3bf2106ae..ebb6672cbab18 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -403,8 +403,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, HalfFloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + HalfFloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -577,8 +583,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, FloatDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + FloatDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -717,8 +729,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder 
getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedDoublesIndexFieldData.Builder(name, numericType(), valuesSourceType, DoubleDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedDoublesIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + DoubleDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -831,8 +849,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ByteDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ByteDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -930,8 +954,14 @@ Number valueForSearch(Number value) { } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, ShortDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + ShortDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1097,8 +1127,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, IntegerDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + IntegerDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1234,8 +1270,14 @@ public void addFields(LuceneDocument document, String name, Number value, boolea } @Override - public IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType) { - return new SortedNumericIndexFieldData.Builder(name, numericType(), valuesSourceType, LongDocValuesField::new); + public IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType) { + return new SortedNumericIndexFieldData.Builder( + ft.name(), + numericType(), + valuesSourceType, + LongDocValuesField::new, + ft.isIndexed() + ); } @Override @@ -1494,7 +1536,7 @@ public static Query longRangeQuery( return builder.apply(l, u); } - public abstract IndexFieldData.Builder getFieldDataBuilder(String name, ValuesSourceType valuesSourceType); + public abstract IndexFieldData.Builder getFieldDataBuilder(MappedFieldType ft, ValuesSourceType valuesSourceType); public IndexFieldData.Builder getValueFetcherFieldDataBuilder( String name, @@ -1693,7 +1735,7 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext : type.numericType.getValuesSourceType(); if ((operation == FielddataOperation.SEARCH || operation == FielddataOperation.SCRIPT) && hasDocValues()) { - return type.getFieldDataBuilder(name(), valuesSourceType); + return type.getFieldDataBuilder(this, valuesSourceType); } if 
(operation == FielddataOperation.SCRIPT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 2635c1c11be8e..a46a310d0770f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -216,7 +216,7 @@ public Query rangeQuery( @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new, isIndexed()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 0d4f5562d3046..1d4f56b02ed74 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -66,7 +66,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new); + return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new, isIndexed()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index d83c75455292f..726ec8561535e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -317,7 +317,8 @@ public void testDateNanoDocValues() throws IOException { "my_date", IndexNumericFieldData.NumericType.DATE_NANOSECONDS, CoreValuesSourceType.DATE, - DateNanosDocValuesField::new + DateNanosDocValuesField::new, + false ); // Read index and check the doc values DirectoryReader reader = DirectoryReader.open(w); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java index 20d5fdae5e4cf..f11d3f9b70d23 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java @@ -242,6 +242,11 @@ public LeafNumericFieldData loadDirect(LeafReaderContext context) throws Excepti protected boolean sortRequiresCustomComparator() { return false; } + + @Override + protected boolean isIndexed() { + return false; + } } private static final ScoreFunction RANDOM_SCORE_FUNCTION = new RandomScoreFunction(0, 0, new IndexFieldDataStub()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 6ac538f6c7ce9..96ad3cd5afb22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ 
-163,7 +163,8 @@ private void testCase( "price", IndexNumericFieldData.NumericType.DOUBLE, CoreValuesSourceType.NUMERIC, - (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n) + (dv, n) -> new DelegateDocValuesField(new Doubles(new DoublesSupplier(dv)), n), + false ); FunctionScoreQuery query = new FunctionScoreQuery( new MatchAllDocsQuery(), diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index a5371e7b0b00a..39e73837c83ea 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -65,7 +65,7 @@ public void testDocValueFetcher() throws IOException { when(fieldType.valueFetcher(any(), any())).thenReturn( new DocValueFetcher( DocValueFormat.RAW, - new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null) + new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null, false) ) ); when(sec.getFieldType(any())).thenReturn(fieldType); diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e0ce1f92b2a37..d30c249813cd2 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -362,9 +362,10 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext valuesSourceType, (dv, n) -> { throw new UnsupportedOperationException(); - } + }, + isIndexed() ).build(cache, breakerService); - return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new); + return new UnsignedLongIndexFieldData(signedLongValues, UnsignedLongDocValuesField::new, isIndexed()); }; } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java index 0a312933768fb..2f936531f8c72 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongIndexFieldData.java @@ -17,13 +17,16 @@ public class UnsignedLongIndexFieldData extends IndexNumericFieldData { private final IndexNumericFieldData signedLongIFD; protected final ToScriptFieldFactory toScriptFieldFactory; + protected final boolean indexed; UnsignedLongIndexFieldData( IndexNumericFieldData signedLongFieldData, - ToScriptFieldFactory toScriptFieldFactory + ToScriptFieldFactory toScriptFieldFactory, + boolean indexed ) { this.signedLongIFD = signedLongFieldData; this.toScriptFieldFactory = toScriptFieldFactory; + this.indexed = indexed; } @Override @@ -51,6 +54,11 @@ protected boolean sortRequiresCustomComparator() { return false; } + @Override + protected boolean isIndexed() { + return indexed; + } + @Override public NumericType getNumericType() { return NumericType.LONG; From 
3674a6f1f64a87f12283daffabe8b56a30824db2 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 9 Apr 2024 17:10:58 +0200 Subject: [PATCH 107/173] ESQL: Commit LocateNoStartEvaluator (#107270) This is automatically generated and was created as part of #106899. From 4a5329d29b2720fe7c5b293607cf63bf592c6d9b Mon Sep 17 00:00:00 2001 From: Sean Story Date: Tue, 9 Apr 2024 10:21:06 -0500 Subject: [PATCH 108/173] typo: "not found" -> "not_found" (#107276) --- docs/reference/docs/bulk.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index fc25e811807a9..1a32e64cedb1f 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,7 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. Other valid values are `noop` and `not found`. +`updated`. Other valid values are `noop` and `not_found`. `_shards`:: (object) From 31c05e9528772731aa64c8558c25ab8ef1165d51 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 9 Apr 2024 18:57:34 +0300 Subject: [PATCH 109/173] ESQL: allow sorting by expressions and not only regular fields (#107158) * Support expressions in sort commands --- docs/changelog/107158.yaml | 5 + .../src/main/resources/eval.csv-spec | 66 +++++ .../src/main/resources/stats.csv-spec | 24 ++ .../esql/optimizer/LogicalPlanOptimizer.java | 33 ++- .../optimizer/LogicalPlanOptimizerTests.java | 250 ++++++++++++++++-- 5 files changed, 357 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/107158.yaml diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml new file mode 100644 index 0000000000000..9589fe7e7264b --- /dev/null +++ b/docs/changelog/107158.yaml @@ -0,0 +1,5 @@ +pr: 107158 +summary: "ESQL: allow sorting by expressions and not only regular fields" +area: ES|QL +type: feature +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 9b06e9a0a8b23..85b665d717449 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -424,3 +424,69 @@ emp_no:i -10002 -10003 ; + +sortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| SORT emp_no + salary ASC +| EVAL emp_no = -emp_no +| LIMIT 10 +| EVAL sum = -emp_no + salary +| KEEP emp_no, salary, sum +; + + emp_no:i | salary:i | sum:i +-10015 |25324 |35339 +-10035 |25945 |35980 +-10092 |25976 |36068 +-10048 |26436 |36484 +-10057 |27215 |37272 +-10084 |28035 |38119 +-10026 |28336 |38362 +-10068 |28941 |39009 +-10060 |29175 |39235 +-10042 |30404 |40446 +; + +sortConcat1#[skip:-8.13.99,reason:supported in 8.14] +from employees +| sort concat(left(last_name, 1), left(first_name, 1)), salary desc +| keep first_name, last_name, salary +| eval ll = left(last_name, 1), lf = left(first_name, 1) +| limit 10 +; + + first_name:keyword | last_name:keyword | salary:integer|ll:keyword|lf:keyword +Mona |Azuma |46595 |A |M +Satosi |Awdeh |50249 |A |S +Brendon |Bernini |33370 |B |B +Breannda |Billingsley |29175 |B |B +Cristinel |Bouloucos |58715 |B |C +Charlene |Brattka |28941 |B |C +Margareta |Bierman |41933 |B |M +Mokhtar |Bernatsky |38992 |B |M +Parto |Bamford |61805 |B |P +Premal |Baek |52833 |B |P +; + +sortConcat2#[skip:-8.13.99,reason:supported in 8.14] +from 
employees +| eval ln = last_name, fn = first_name, concat = concat(left(last_name, 1), left(first_name, 1)) +| sort concat(left(ln, 1), left(fn, 1)), salary desc +| keep f*, l*, salary +| eval c = concat(left(last_name, 1), left(first_name, 1)) +| drop *name, lan* +| limit 10 +; + + fn:keyword | ln:keyword | salary:integer| c:keyword +Mona |Azuma |46595 |AM +Satosi |Awdeh |50249 |AS +Brendon |Bernini |33370 |BB +Breannda |Billingsley |29175 |BB +Cristinel |Bouloucos |58715 |BC +Charlene |Brattka |28941 |BC +Margareta |Bierman |41933 |BM +Mokhtar |Bernatsky |38992 |BM +Parto |Bamford |61805 |BP +Premal |Baek |52833 |BP +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index fb2d46baf27ff..867ff127c90e8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1585,3 +1585,27 @@ c:l | k1:i | languages:i 21 | 5 | 5 10 | null | null ; + +minWithSortExpression1#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + languages; + + min:i | languages:i +25324 |5 +25976 |1 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; + +minWithSortExpression2#[skip:-8.13.99,reason:supported in 8.14] +FROM employees | STATS min = min(salary) by languages | SORT min + CASE(languages == 5, 655, languages); + + min:i | languages:i +25976 |1 +25324 |5 +26436 |3 +27215 |4 +29175 |2 +28336 |null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 7fb2784bb044f..2aaf34a1dd1d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -84,6 +84,7 @@ import static java.util.Arrays.asList; import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; +import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.SubstituteSurrogates.rawTemporaryName; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN; @@ -125,7 +126,8 @@ protected static Batch substitutions() { new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject(), new SkipQueryOnEmptyMappings(), - new SubstituteSpatialSurrogates() + new SubstituteSpatialSurrogates(), + new ReplaceOrderByExpressionWithEval() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); } @@ -321,6 +323,35 @@ protected SpatialRelatesFunction rule(SpatialRelatesFunction function) { } } + static class ReplaceOrderByExpressionWithEval extends OptimizerRules.OptimizerRule { + private static int counter = 0; + + @Override + protected LogicalPlan rule(OrderBy orderBy) { + int size = orderBy.order().size(); + List evals = new ArrayList<>(size); + List newOrders = new ArrayList<>(size); + + for (int i = 0; i < size; i++) { + var order = orderBy.order().get(i); + if (order.child() instanceof Attribute == false) { + var name = rawTemporaryName("order_by", String.valueOf(i), 
String.valueOf(counter++)); + var eval = new Alias(order.child().source(), name, order.child()); + newOrders.add(order.replaceChildren(List.of(eval.toAttribute()))); + evals.add(eval); + } else { + newOrders.add(order); + } + } + if (evals.isEmpty()) { + return orderBy; + } else { + var newOrderBy = new OrderBy(orderBy.source(), new Eval(orderBy.source(), orderBy.child(), evals), newOrders); + return new Project(orderBy.source(), newOrderBy, orderBy.output()); + } + } + } + static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule { ConvertStringToByteRef() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index eb3901f37b99a..a60999baba9fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -156,6 +157,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -3832,12 +3834,11 @@ public void testNoWrongIsNotNullPruning() { * * For DISSECT expects the following; the others are similar. * - * EsqlProject[[first_name{f}#37, emp_no{r}#33, salary{r}#34]] - * \_TopN[[Order[$$emp_no$temp_name$36{r}#46 + $$salary$temp_name$41{r}#47 * 13[INTEGER],ASC,LAST], Order[NEG($$salary$t - * emp_name$41{r}#47),DESC,FIRST]],3[INTEGER]] - * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, parser=org.elasticsearch.dissect.Dissect - * Parser@b6858b],[emp_no{r}#33, salary{r}#34]] - * \_Eval[[emp_no{f}#36 AS $$emp_no$temp_name$36, salary{f}#41 AS $$salary$temp_name$41]] + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] 
*/ public void testPushdownWithOverwrittenName() { @@ -3850,7 +3851,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepAfter = """ FROM test - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | KEEP first_name, emp_no, salary | LIMIT 3 @@ -3859,7 +3860,7 @@ public void testPushdownWithOverwrittenName() { String queryTemplateKeepFirst = """ FROM test | KEEP emp_no, salary, first_name - | SORT 13*(emp_no+salary) ASC, -salary DESC + | SORT emp_no ASC nulls first, salary DESC nulls last, emp_no | {} | LIMIT 3 """; @@ -3876,20 +3877,27 @@ public void testPushdownWithOverwrittenName() { assertThat(projections.get(2).name(), equalTo("salary")); var topN = as(project.child(), TopN.class); - assertThat(topN.order().size(), is(2)); + assertThat(topN.order().size(), is(3)); - var firstOrderExpr = as(topN.order().get(0), Order.class); - var mul = as(firstOrderExpr.child(), Mul.class); - var add = as(mul.left(), Add.class); - var renamed_emp_no = as(add.left(), ReferenceAttribute.class); - var renamed_salary = as(add.right(), ReferenceAttribute.class); + var firstOrder = as(topN.order().get(0), Order.class); + assertThat(firstOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamed_emp_no = as(firstOrder.child(), ReferenceAttribute.class); assertThat(renamed_emp_no.toString(), startsWith("$$emp_no$temp_name")); + + var secondOrder = as(topN.order().get(1), Order.class); + assertThat(secondOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_salary = as(secondOrder.child(), ReferenceAttribute.class); assertThat(renamed_salary.toString(), startsWith("$$salary$temp_name")); - var secondOrderExpr = as(topN.order().get(1), Order.class); - var neg = as(secondOrderExpr.child(), Neg.class); - var renamed_salary2 = as(neg.field(), ReferenceAttribute.class); - assert (renamed_salary2.semanticEquals(renamed_salary) && renamed_salary2.equals(renamed_salary)); + var thirdOrder = as(topN.order().get(2), Order.class); + assertThat(thirdOrder.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(thirdOrder.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamed_emp_no2 = as(thirdOrder.child(), ReferenceAttribute.class); + assertThat(renamed_emp_no2.toString(), startsWith("$$emp_no$temp_name")); + + assert (renamed_emp_no2.semanticEquals(renamed_emp_no) && renamed_emp_no2.equals(renamed_emp_no)); Eval renamingEval = null; if (overwritingCommand.startsWith("EVAL")) { @@ -3913,8 +3921,210 @@ public void testPushdownWithOverwrittenName() { for (Alias field : renamingEval.fields()) { attributesCreatedInEval.add(field.toAttribute()); } - assert (attributesCreatedInEval.contains(renamed_emp_no)); - assert (attributesCreatedInEval.contains(renamed_salary)); + assertThat(attributesCreatedInEval, allOf(hasItem(renamed_emp_no), hasItem(renamed_salary), hasItem(renamed_emp_no2))); + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 3 for the other overwritingCommands + // emp_no ASC nulls first + Alias empNoAsc = renamingEval.fields().get(0); + assertThat(empNoAsc.toAttribute(), 
equalTo(renamed_emp_no)); + var emp_no = as(empNoAsc.child(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + + // salary DESC nulls last + Alias salaryDesc = renamingEval.fields().get(1); + assertThat(salaryDesc.toAttribute(), equalTo(renamed_salary)); + var salary_desc = as(salaryDesc.child(), FieldAttribute.class); + assertThat(salary_desc.name(), equalTo("salary")); + + assertThat(renamingEval.child(), instanceOf(EsRelation.class)); + } + } + + /** + * Expects + * Project[[min{r}#4, languages{f}#11]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#18,ASC,LAST]],1000[INTEGER]] + * \_Eval[[min{r}#4 + languages{f}#11 AS $$order_by$temp_name$0]] + * \_Aggregate[[languages{f}#11],[MIN(salary{f}#13) AS min, languages{f}#11]] + * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] + */ + public void testReplaceSortByExpressionsWithStats() { + var plan = optimizedPlan(""" + from test + | stats min = min(salary) by languages + | sort min + languages + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("min", "languages")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(1)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + var aggregate = as(eval.child(), Aggregate.class); + var aggregates = aggregate.aggregates(); + assertThat(Expressions.names(aggregates), contains("min", "languages")); + var unwrapped = Alias.unwrap(aggregates.get(0)); + var min = as(unwrapped, Min.class); + as(aggregate.child(), EsRelation.class); + } + + /** + * Expects + * + * Project[[salary{f}#19, languages{f}#17, emp_no{f}#14]] + * \_TopN[[Order[$$order_by$0$0{r}#24,ASC,LAST], Order[emp_no{f}#14,DESC,FIRST]],1000[INTEGER]] + * \_Eval[[salary{f}#19 / 10000[INTEGER] + languages{f}#17 AS $$order_by$0$0]] + * \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..] 
+ */ + public void testReplaceSortByExpressionsMultipleSorts() { + var plan = optimizedPlan(""" + from test + | sort salary/10000 + languages, emp_no desc + | eval d = emp_no + | sort salary/10000 + languages, d desc + | keep salary, languages, emp_no + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("salary", "languages", "emp_no")); + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var order = as(topN.order().get(0), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + ReferenceAttribute expression = as(order.child(), ReferenceAttribute.class); + assertThat(expression.toString(), startsWith("$$order_by$0$")); + + order = as(topN.order().get(1), Order.class); + assertThat(order.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(order.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + FieldAttribute empNo = as(order.child(), FieldAttribute.class); + assertThat(empNo.name(), equalTo("emp_no")); + + var eval = as(topN.child(), Eval.class); + var fields = eval.fields(); + assertThat(fields.size(), equalTo(1)); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(expression))); + Alias salaryAddLanguages = eval.fields().get(0); + var add = as(salaryAddLanguages.child(), Add.class); + var div = as(add.left(), Div.class); + var salary = as(div.left(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _10000 = as(div.right(), Literal.class); + assertThat(_10000.value(), equalTo(10000)); + var languages = as(add.right(), FieldAttribute.class); + assertThat(languages.name(), equalTo("languages")); + + as(eval.child(), EsRelation.class); + } + + /** + * For DISSECT expects the following; the others are similar. + * + * Project[[first_name{f}#37, emp_no{r}#30, salary{r}#31]] + * \_TopN[[Order[$$order_by$temp_name$0{r}#46,ASC,LAST], Order[$$order_by$temp_name$1{r}#47,DESC,FIRST]],3[INTEGER]] + * \_Dissect[first_name{f}#37,Parser[pattern=%{emp_no} %{salary}, appendSeparator=, + * parser=org.elasticsearch.dissect.DissectParser@87f460f],[emp_no{r}#30, salary{r}#31]] + * \_Eval[[emp_no{f}#36 + salary{f}#41 * 13[INTEGER] AS $$order_by$temp_name$0, NEG(salary{f}#41) AS $$order_by$temp_name$1]] + * \_EsRelation[test][_meta_field{f}#42, emp_no{f}#36, first_name{f}#37, ..] + */ + public void testReplaceSortByExpressions() { + List overwritingCommands = List.of( + "EVAL emp_no = 3*emp_no, salary = -2*emp_no-salary", + "DISSECT first_name \"%{emp_no} %{salary}\"", + "GROK first_name \"%{WORD:emp_no} %{WORD:salary}\"", + "ENRICH languages_idx ON first_name WITH emp_no = language_code, salary = language_code" + ); + + String queryTemplateKeepAfter = """ + FROM test + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | KEEP first_name, emp_no, salary + | LIMIT 3 + """; + // Equivalent but with KEEP first - ensures that attributes in the final projection are correct after pushdown rules were applied. + String queryTemplateKeepFirst = """ + FROM test + | KEEP emp_no, salary, first_name + | SORT 13*(emp_no+salary) ASC, -salary DESC + | {} + | LIMIT 3 + """; + + for (String overwritingCommand : overwritingCommands) { + String queryTemplate = randomBoolean() ? 
queryTemplateKeepFirst : queryTemplateKeepAfter; + var plan = optimizedPlan(LoggerMessageFormat.format(null, queryTemplate, overwritingCommand)); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(projections.size(), equalTo(3)); + assertThat(projections.get(0).name(), equalTo("first_name")); + assertThat(projections.get(1).name(), equalTo("emp_no")); + assertThat(projections.get(2).name(), equalTo("salary")); + + var topN = as(project.child(), TopN.class); + assertThat(topN.order().size(), is(2)); + + var firstOrderExpr = as(topN.order().get(0), Order.class); + assertThat(firstOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC)); + assertThat(firstOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.LAST)); + var renamedEmpNoSalaryExpression = as(firstOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedEmpNoSalaryExpression.toString(), startsWith("$$order_by$0$")); + + var secondOrderExpr = as(topN.order().get(1), Order.class); + assertThat(secondOrderExpr.direction(), equalTo(org.elasticsearch.xpack.ql.expression.Order.OrderDirection.DESC)); + assertThat(secondOrderExpr.nullsPosition(), equalTo(org.elasticsearch.xpack.ql.expression.Order.NullsPosition.FIRST)); + var renamedNegatedSalaryExpression = as(secondOrderExpr.child(), ReferenceAttribute.class); + assertThat(renamedNegatedSalaryExpression.toString(), startsWith("$$order_by$1$")); + + Eval renamingEval = null; + if (overwritingCommand.startsWith("EVAL")) { + // Multiple EVALs should be merged, so there's only one. + renamingEval = as(topN.child(), Eval.class); + } + if (overwritingCommand.startsWith("DISSECT")) { + var dissect = as(topN.child(), Dissect.class); + renamingEval = as(dissect.child(), Eval.class); + } + if (overwritingCommand.startsWith("GROK")) { + var grok = as(topN.child(), Grok.class); + renamingEval = as(grok.child(), Eval.class); + } + if (overwritingCommand.startsWith("ENRICH")) { + var enrich = as(topN.child(), Enrich.class); + renamingEval = as(enrich.child(), Eval.class); + } + + assertThat(renamingEval.fields().size(), anyOf(equalTo(2), equalTo(4))); // 4 for EVAL, 2 for the other overwritingCommands + + // 13*(emp_no+salary) + Alias _13empNoSalary = renamingEval.fields().get(0); + assertThat(_13empNoSalary.toAttribute(), equalTo(renamedEmpNoSalaryExpression)); + var mul = as(_13empNoSalary.child(), Mul.class); + var add = as(mul.left(), Add.class); + var emp_no = as(add.left(), FieldAttribute.class); + assertThat(emp_no.name(), equalTo("emp_no")); + var salary = as(add.right(), FieldAttribute.class); + assertThat(salary.name(), equalTo("salary")); + var _13 = as(mul.right(), Literal.class); + assertThat(_13.value(), equalTo(13)); + + // -salary + Alias negatedSalary = renamingEval.fields().get(1); + assertThat(negatedSalary.toAttribute(), equalTo(renamedNegatedSalaryExpression)); + var neg = as(negatedSalary.child(), Neg.class); + assertThat(neg.field(), equalTo(salary)); assertThat(renamingEval.child(), instanceOf(EsRelation.class)); } From aba75664090a808e90148d19fc2def97560ef2d4 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 12:22:15 -0400 Subject: [PATCH 110/173] ESQL: Better tests to AUTO_BUCKET (#107228) This improves the tests for AUTO_BUCKET marginally, specifically so that it tests all valid combinations of arguments and generates a correct types table. 
This'll combine nicely with #106782 to generate the signatures that kibana needs for it's editor. --- .../esql/functions/types/auto_bucket.asciidoc | 37 +++- .../src/main/resources/meta.csv-spec | 4 +- .../function/scalar/math/AutoBucket.java | 4 +- .../function/AbstractFunctionTestCase.java | 48 +++-- .../expression/function/TestCaseSupplier.java | 8 + .../function/scalar/math/AutoBucketTests.java | 179 ++++++++++-------- 6 files changed, 180 insertions(+), 100 deletions(-) diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index 535e2df29c353..cfe74ae25c3d0 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -5,5 +5,40 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== field | buckets | from | to | result - +datetime | integer | datetime | datetime | datetime +datetime | integer | datetime | keyword | datetime +datetime | integer | datetime | text | datetime +datetime | integer | keyword | datetime | datetime +datetime | integer | keyword | keyword | datetime +datetime | integer | keyword | text | datetime +datetime | integer | text | datetime | datetime +datetime | integer | text | keyword | datetime +datetime | integer | text | text | datetime +double | integer | double | double | double +double | integer | double | integer | double +double | integer | double | long | double +double | integer | integer | double | double +double | integer | integer | integer | double +double | integer | integer | long | double +double | integer | long | double | double +double | integer | long | integer | double +double | integer | long | long | double +integer | integer | double | double | double +integer | integer | double | integer | double +integer | integer | double | long | double +integer | integer | integer | double | double +integer | integer | integer | integer | double +integer | integer | integer | long | double +integer | integer | long | double | double +integer | integer | long | integer | double +integer | integer | long | long | double +long | integer | double | double | double +long | integer | double | integer | double +long | integer | double | long | double +long | integer | integer | double | double +long | integer | integer | integer | double +long | integer | integer | long | double +long | integer | long | double | double +long | integer | long | integer | double +long | integer | long | long | double |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index d344b50c0364f..492da4ee5ef36 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,7 +7,7 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "double avg(number:double|integer|long)" 
"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" @@ -117,7 +117,7 @@ acos |number |"double|integer|long|unsigne asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] +auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] avg |number |"double|integer|long" |[""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index b9aeff7f1d935..ea581437f6c4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -90,8 +90,8 @@ public AutoBucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date", "string" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date", "string" }) Expression to + @Param(name = "from", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date", "keyword", "text" }) Expression to ) { super(source, List.of(field, buckets, from, to)); this.field = field; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 12c141cc7c8a7..889dfbf4c9b17 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -214,7 +214,10 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) { } Layout.Builder builder = new Layout.Builder(); buildLayout(builder, e); - assertTrue(e.resolved()); + Expression.TypeResolution resolution = e.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } 
return EvalMapper.toEvaluator(e, builder.build()); } @@ -256,7 +259,10 @@ public final void testEvaluate() { } return; } - assertFalse("expected resolved", expression.typeResolved().unresolved()); + Expression.TypeResolution resolution = expression.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } expression = new FoldNull().rule(expression); assertThat(expression.dataType(), equalTo(testCase.expectedType())); logger.info("Result type: " + expression.dataType()); @@ -596,6 +602,28 @@ private static String signatureType(DataType type) { * on input types like {@link Greatest} or {@link Coalesce}. */ protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { + return anyNullIsNull( + testCaseSuppliers, + (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false + && nullValueDataType == DataTypes.NULL + && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), + (nullPosition, original) -> original + ); + } + + public interface ExpectedType { + DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); + } + + public interface ExpectedEvaluatorToString { + Matcher evaluatorToString(int nullPosition, Matcher original); + } + + protected static List anyNullIsNull( + List testCaseSuppliers, + ExpectedType expectedType, + ExpectedEvaluatorToString evaluatorToString + ) { typesRequired(testCaseSuppliers); List suppliers = new ArrayList<>(testCaseSuppliers.size()); suppliers.addAll(testCaseSuppliers); @@ -618,15 +646,12 @@ protected static List anyNullIsNull(boolean entirelyNullPreser TestCaseSupplier.TestCase oc = original.get(); List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { TestCaseSupplier.TypedData od = oc.getData().get(i); - if (i == finalNullPosition) { - return new TestCaseSupplier.TypedData(null, od.type(), od.name()); - } - return od; + return i == finalNullPosition ? od.forceValueToNull() : od; }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString(), - oc.expectedType(), + evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), nullValue(), null, oc.getExpectedTypeError(), @@ -649,7 +674,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser return new TestCaseSupplier.TestCase( data, equalTo("LiteralsEvaluator[lit=null]"), - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType(), + expectedType.expectedType(finalNullPosition, DataTypes.NULL, oc), nullValue(), null, oc.getExpectedTypeError(), @@ -755,9 +780,8 @@ private static Stream> allPermutations(int argumentCount) { if (argumentCount == 0) { return Stream.of(List.of()); } - if (argumentCount > 4) { - // TODO check for a limit 4. is arbitrary. 
- throw new IllegalArgumentException("would generate too many types"); + if (argumentCount > 3) { + throw new IllegalArgumentException("would generate too many combinations"); } Stream> stream = representable().map(t -> List.of(t)); for (int i = 1; i < argumentCount; i++) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index d600e51c07925..c064cfebd9cc5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1325,6 +1325,14 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Return a {@link TypedData} that always returns {@code null} for it's + * value without modifying anything else in the supplier. + */ + public TypedData forceValueToNull() { + return new TypedData(null, type, name, forceLiteral); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java index 013753c801c39..9d8cf702a375a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java @@ -13,126 +13,139 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.util.ArrayList; import java.util.List; +import java.util.function.LongSupplier; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class AutoBucketTests extends AbstractScalarFunctionTestCase { +public class AutoBucketTests extends AbstractFunctionTestCase { public AutoBucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Autobucket Single date", () -> { - List args = List.of( - new TestCaseSupplier.TypedData( - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), - DataTypes.DATETIME, - "arg" - ) - ); - return new TestCaseSupplier.TestCase( - args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", - DataTypes.DATETIME, - dateResultsMatcher(args) - ); - }), new TestCaseSupplier("Autobucket Single long", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100L, 
DataTypes.LONG, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastLongToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single int", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100, DataTypes.INTEGER, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[" - + "val=DivDoublesEvaluator[lhs=CastIntToDoubleEvaluator[v=Attribute[channel=0]], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }), new TestCaseSupplier("Autobucket Single double", () -> { - List args = List.of(new TestCaseSupplier.TypedData(100.0, DataTypes.DOUBLE, "arg")); - return new TestCaseSupplier.TestCase( - args, - "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=Attribute[channel=0], " - + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", - DataTypes.DOUBLE, - numericResultsMatcher(args, 100.0) - ); - }))); + List suppliers = new ArrayList<>(); + dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); + numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100); + numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0); + // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters + return parameterSuppliersFromTypedData( + anyNullIsNull( + suppliers, + (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL + ? DataTypes.NULL + : original.expectedType(), + (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + ) + ); } - private Expression build(Source source, Expression arg) { - Literal from; - Literal to; - if (arg.dataType() == DataTypes.DATETIME) { - from = stringOrDateTime("2023-02-01T00:00:00.00Z"); - to = stringOrDateTime("2023-03-01T09:00:00.00Z"); - } else { - from = new Literal(Source.EMPTY, 0, DataTypes.DOUBLE); - to = new Literal(Source.EMPTY, 1000, DataTypes.DOUBLE); - } - return new AutoBucket(source, arg, new Literal(Source.EMPTY, 50, DataTypes.INTEGER), from, to); - } + // TODO once we cast above the functions we can drop these + private static final DataType[] DATE_BOUNDS_TYPE = new DataType[] { DataTypes.DATETIME, DataTypes.KEYWORD, DataTypes.TEXT }; - private Literal stringOrDateTime(String date) { - if (randomBoolean()) { - return new Literal(Source.EMPTY, new BytesRef(date), randomBoolean() ? 
DataTypes.KEYWORD : DataTypes.TEXT); + private static void dateCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for (DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + DataTypes.DATETIME, + dateResultsMatcher(args) + ); + })); + } } - return new Literal(Source.EMPTY, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date), DataTypes.DATETIME); } - @Override - protected DataType expectedType(List argTypes) { - if (argTypes.get(0).isNumeric()) { - return DataTypes.DOUBLE; + private static TestCaseSupplier.TypedData dateBound(String name, DataType type, String date) { + Object value; + if (type == DataTypes.DATETIME) { + value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); + } else { + value = new BytesRef(date); } - return argTypes.get(0); + return new TestCaseSupplier.TypedData(value, type, name).forceLiteral(); } - private static Matcher dateResultsMatcher(List typedData) { - long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; + + private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { + for (DataType fromType : NUMBER_BOUNDS_TYPES) { + for (DataType toType : NUMBER_BOUNDS_TYPES) { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(number.get(), "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataTypes.INTEGER, "buckets").forceLiteral()); + args.add(numericBound("from", fromType, 0.0)); + args.add(numericBound("to", toType, 1000.0)); + // TODO more number types for "from" and "to" + String attr = "Attribute[channel=0]"; + if (numberType == DataTypes.INTEGER) { + attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; + } else if (numberType == DataTypes.LONG) { + attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; + } + return new TestCaseSupplier.TestCase( + args, + "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=" + + attr + + ", " + + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", + DataTypes.DOUBLE, + dateResultsMatcher(args) + ); + })); + } + } } - private static Matcher numericResultsMatcher(List typedData, Object value) { - return equalTo(value); + private static TestCaseSupplier.TypedData numericBound(String name, DataType type, double value) { + Number v; + if (type == DataTypes.INTEGER) { + v = (int) value; + } else if (type == DataTypes.LONG) { + v = (long) value; + } else { + v = value; + } + return new TestCaseSupplier.TypedData(v, 
type, name).forceLiteral(); } - @Override - protected List argSpec() { - DataType[] numerics = numerics(); - DataType[] all = new DataType[numerics.length + 1]; - all[0] = DataTypes.DATETIME; - System.arraycopy(numerics, 0, all, 1, numerics.length); - return List.of(required(all)); + private static Matcher dateResultsMatcher(List typedData) { + if (typedData.get(0).type() == DataTypes.DATETIME) { + long millis = ((Number) typedData.get(0).data()).longValue(); + return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + } + return equalTo(((Number) typedData.get(0).data()).doubleValue()); } @Override protected Expression build(Source source, List args) { - return build(source, args.get(0)); + return new AutoBucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); } @Override - protected Matcher badTypeError(List spec, int badArgPosition, DataType badArgType) { - return equalTo("first argument of [exp] must be [datetime or numeric], found value [arg0] type [" + badArgType.typeName() + "]"); + public void testSimpleWithNulls() { + assumeFalse("we test nulls in parameters", true); } } From 24aed5c7feaf59474ebb52ffad9db5f51c01fa60 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 9 Apr 2024 09:24:11 -0700 Subject: [PATCH 111/173] Simplify merging enrich output (#107018) The merge logic in MergePositionsOperator is excessively complex and lacks flexibility. It relies on the source operator emitting pages with ascending positions. Additionally, this merge logic introduced an unusual method, appendAllValuesToCurrentPosition, to the Block.Builder. We should replace this with a simpler and more flexible approach. This PR uses a mechanism similar to the grouping aggregation. In fact, it is very close to the values aggregation. Initially, I considered using the GroupingState from ValuesAggregator. However, unlike in the values aggregation, we don't expect many multi-values in enrich. Hence, I introduced the new EnrichResultBuilders instead. 
--- x-pack/plugin/esql/build.gradle | 57 +++++++ .../compute/data/BooleanBlock.java | 13 -- .../compute/data/BooleanBlockBuilder.java | 49 ------ .../compute/data/BytesRefBlock.java | 13 -- .../compute/data/BytesRefBlockBuilder.java | 50 ------ .../compute/data/DoubleBlock.java | 13 -- .../compute/data/DoubleBlockBuilder.java | 49 ------ .../elasticsearch/compute/data/IntBlock.java | 13 -- .../compute/data/IntBlockBuilder.java | 49 ------ .../elasticsearch/compute/data/LongBlock.java | 13 -- .../compute/data/LongBlockBuilder.java | 49 ------ .../org/elasticsearch/compute/data/Block.java | 6 - .../compute/data/ConstantNullBlock.java | 5 - .../elasticsearch/compute/data/DocBlock.java | 5 - .../data/SingletonOrdinalsBuilder.java | 5 - .../compute/data/X-Block.java.st | 13 -- .../compute/data/X-BlockBuilder.java.st | 60 ------- .../data/BlockBuilderAppendBlockTests.java | 147 ----------------- .../compute/data/TestBlockBuilder.java | 30 ---- .../enrich/EnrichResultBuilderForBoolean.java | 90 +++++++++++ .../EnrichResultBuilderForBytesRef.java | 107 +++++++++++++ .../enrich/EnrichResultBuilderForDouble.java | 90 +++++++++++ .../enrich/EnrichResultBuilderForInt.java | 90 +++++++++++ .../enrich/EnrichResultBuilderForLong.java | 90 +++++++++++ .../esql/enrich/EnrichLookupService.java | 28 +--- .../esql/enrich/EnrichResultBuilder.java | 80 ++++++++++ .../esql/enrich/MergePositionsOperator.java | 123 +++------------ .../esql/enrich/X-EnrichResultBuilder.java.st | 134 ++++++++++++++++ .../esql/enrich/EnrichResultBuilderTests.java | 148 ++++++++++++++++++ .../enrich/MergePositionsOperatorTests.java | 1 - 30 files changed, 910 insertions(+), 710 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java create mode 100644 x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 668ecec0e393d..87ef4dd0b3eff 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.string-templates' esplugin { name 'x-pack-esql' description 'The plugin that powers ESQL for Elasticsearch' @@ -222,3 +223,59 @@ tasks.register("regen") { } } } + +tasks.named("spotlessJava") { dependsOn stringTemplates } +tasks.named('checkstyleMain').configure { + excludes = [ "**/*.java.st" ] +} + +def prop(Type, type, TYPE, BYTES, Array) { + 
return [ + "Type" : Type, + "type" : type, + "TYPE" : TYPE, + "BYTES" : BYTES, + "Array" : Array, + + "int" : type == "int" ? "true" : "", + "long" : type == "long" ? "true" : "", + "double" : type == "double" ? "true" : "", + "BytesRef" : type == "BytesRef" ? "true" : "", + "boolean" : type == "boolean" ? "true" : "", + ] +} + +tasks.named('stringTemplates').configure { + var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") + var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") + var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + // enrich + File enrichResultBuilderInput = file("src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st") + template { + it.properties = intProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java" + } + template { + it.properties = longProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java" + } + template { + it.properties = doubleProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java" + } + template { + it.properties = booleanProperties + it.inputFile = enrichResultBuilderInput + it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 43181a344e268..f365a2ed78610 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(BooleanBlock block); - @Override BooleanBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 09c436e805d57..32627a0e0d36b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -71,55 +71,6 @@ public BooleanBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BooleanBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BooleanBlockBuilder appendAllValuesToCurrentPosition(BooleanBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final BooleanVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBoolean(vector.getBoolean(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBoolean(block.getBoolean(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BooleanBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 5f5e1f9caa488..a6c75dbc1122f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -229,19 +229,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder perm @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(BytesRefBlock block); - @Override BytesRefBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index aed422b0c0104..4ef7ed4084228 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -78,56 +78,6 @@ protected void writeNullValue() { values.append(BytesRefBlock.NULL_VALUE); } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((BytesRefBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public BytesRefBlockBuilder appendAllValuesToCurrentPosition(BytesRefBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - BytesRef scratch = new BytesRef(); - final BytesRefVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendBytesRef(vector.getBytesRef(p, scratch)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBytesRef(block.getBytesRef(i++, scratch)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public BytesRefBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 27d70caaa18fe..a682c2cba019e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permit @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(DoubleBlock block); - @Override DoubleBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 427127784869a..5921c2daa9f92 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -71,55 +71,6 @@ public DoubleBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((DoubleBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public DoubleBlockBuilder appendAllValuesToCurrentPosition(DoubleBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final DoubleVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendDouble(vector.getDouble(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendDouble(block.getDouble(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public DoubleBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index a34d50bf6ff55..e9d606b51c6a1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -223,19 +223,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits I @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(IntBlock block); - @Override IntBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index aaf46798fd789..85f943004de29 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -71,55 +71,6 @@ public IntBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((IntBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public IntBlockBuilder appendAllValuesToCurrentPosition(IntBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final IntVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendInt(vector.getInt(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendInt(block.getInt(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public IntBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 21c3eb4257b8d..3e1c5fcfaac95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -224,19 +224,6 @@ sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition(LongBlock block); - @Override LongBlock build(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index 5d8daf306809d..d24ae214da63a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -71,55 +71,6 @@ public LongBlockBuilder endPositionEntry() { return this; } - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition((LongBlock) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public LongBlockBuilder appendAllValuesToCurrentPosition(LongBlock block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } - final LongVector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { - appendLong(vector.getLong(p)); - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendLong(block.getLong(i++)); - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public LongBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 7fc92da1943ac..0e34eaa68881f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -183,12 +183,6 @@ interface Builder extends BlockLoader.Builder, Releasable { */ Builder endPositionEntry(); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - Builder appendAllValuesToCurrentPosition(Block block); - /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. 
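The method deleted throughout the hunks above flattened every value of its argument block into a single, possibly multivalued, position of the target builder. A minimal sketch of that contract, restated against the `Block.Builder` primitives that survive this patch (`appendNull`, `beginPositionEntry`, `endPositionEntry`, and the typed appends); the class wrapper, helper name, and `IntBlock` specialization are illustrative, not code from the repository:

[source,java]
----
import org.elasticsearch.compute.data.IntBlock;

final class AppendAllSketch {
    // Flattens every value of `src` into one position of `dst`, mirroring the
    // deleted appendAllValuesToCurrentPosition implementations.
    static void appendAllToOnePosition(IntBlock src, IntBlock.Builder dst) {
        if (src.getTotalValueCount() == 0) {
            dst.appendNull(); // empty and all-null blocks collapse to one null
            return;
        }
        boolean multivalued = src.getTotalValueCount() > 1;
        if (multivalued) {
            dst.beginPositionEntry(); // open a single multivalued position
        }
        for (int p = 0; p < src.getPositionCount(); p++) {
            int first = src.getFirstValueIndex(p);
            for (int v = 0; v < src.getValueCount(p); v++) {
                dst.appendInt(src.getInt(first + v));
            }
        }
        if (multivalued) {
            dst.endPositionEntry();
        }
    }
}
----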
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index c2ac99a7c8489..3df75f4bc1c56 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -172,11 +172,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - return appendNull(); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 8d3497a66a2d7..2751cd31fd362 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -149,11 +149,6 @@ public Builder copyFrom(Block block, int beginInclusive, int endExclusive) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException("DocBlock doesn't support appendBlockAndMerge"); - } - @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { /* diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java index 8616d7a7e1bc6..fd9dd6a479298 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -165,11 +165,6 @@ public void close() { blockFactory.adjustBreaker(-ordsSize(ords.length)); } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - throw new UnsupportedOperationException(); - } - @Override public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 3850e3da7c796..331a5713fa3d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -277,19 +277,6 @@ $endif$ @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - Builder appendAllValuesToCurrentPosition(Block block); - - /** - * Appends the all values of the given block into a the current position - * in this builder. 
- */ - Builder appendAllValuesToCurrentPosition($Type$Block block); - @Override $Type$Block build(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 5b432f1c62968..fab3be0be4233 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -113,66 +113,6 @@ $if(BytesRef)$ } $endif$ - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public $Type$BlockBuilder appendAllValuesToCurrentPosition(Block block) { - if (block.areAllValuesNull()) { - return appendNull(); - } - return appendAllValuesToCurrentPosition(($Type$Block) block); - } - - /** - * Appends the all values of the given block into a the current position - * in this builder. - */ - @Override - public $Type$BlockBuilder appendAllValuesToCurrentPosition($Type$Block block) { - final int positionCount = block.getPositionCount(); - if (positionCount == 0) { - return appendNull(); - } - final int totalValueCount = block.getTotalValueCount(); - if (totalValueCount == 0) { - return appendNull(); - } - if (totalValueCount > 1) { - beginPositionEntry(); - } -$if(BytesRef)$ - BytesRef scratch = new BytesRef(); -$endif$ - final $Type$Vector vector = block.asVector(); - if (vector != null) { - for (int p = 0; p < positionCount; p++) { -$if(BytesRef)$ - appendBytesRef(vector.getBytesRef(p, scratch)); -$else$ - append$Type$(vector.get$Type$(p)); -$endif$ - } - } else { - for (int p = 0; p < positionCount; p++) { - int count = block.getValueCount(p); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { -$if(BytesRef)$ - appendBytesRef(block.getBytesRef(i++, scratch)); -$else$ - append$Type$(block.get$Type$(i++)); -$endif$ - } - } - } - if (totalValueCount > 1) { - endPositionEntry(); - } - return this; - } - @Override public $Type$BlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) { if (block.areAllValuesNull()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java deleted file mode 100644 index 9c1b02aa74107..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.compute.operator.ComputeTestCase; - -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class BlockBuilderAppendBlockTests extends ComputeTestCase { - - public void testBasic() { - BlockFactory blockFactory = blockFactory(); - IntBlock src = blockFactory.newIntBlockBuilder(10) - .appendInt(1) - .appendNull() - .beginPositionEntry() - .appendInt(4) - .appendInt(6) - .endPositionEntry() - .appendInt(10) - .appendInt(20) - .appendInt(30) - .appendNull() - .beginPositionEntry() - .appendInt(1) - .endPositionEntry() - .build(); - // copy position by position - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - for (int i = 0; i < src.getPositionCount(); i++) { - try (IntBlock filter = src.filter(i)) { - dst.appendAllValuesToCurrentPosition(filter); - } - } - try (IntBlock block = dst.build()) { - assertThat(block, equalTo(src)); - } - } - // copy all block - try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { - try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - try (Block dst = randomlyDivideAndMerge(src)) { - assertThat(dst.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); - } - } - - public void testRandomNullBlock() { - BlockFactory blockFactory = blockFactory(); - IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - src.appendInt(101); - try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { - src.appendAllValuesToCurrentPosition(nullBlock); - } - IntBlock block = src.build(); - assertThat(block.getPositionCount(), equalTo(3)); - assertTrue(block.isNull(0)); - assertThat(block.getInt(1), equalTo(101)); - assertTrue(block.isNull(2)); - try (Block flatten = randomlyDivideAndMerge(block)) { - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); - } - } - - public void testRandom() { - ElementType elementType = randomFrom(ElementType.INT, ElementType.BYTES_REF, ElementType.BOOLEAN); - Block block = BasicBlockTests.randomBlock( - elementType, - randomIntBetween(1, 1024), - randomBoolean(), - 0, - between(1, 16), - 0, - between(0, 16) - ).block(); - - block = randomlyDivideAndMerge(block); - block.close(); - } - - private Block randomlyDivideAndMerge(Block block) { - while (block.getPositionCount() > 1 || randomBoolean()) { - int positionCount = block.getPositionCount(); - int offset = 0; - Block.Builder builder = block.elementType() - .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance()); - List expected = new ArrayList<>(); - while (offset < positionCount) { - int length = randomIntBetween(1, positionCount - offset); - int[] positions = new int[length]; - for (int i = 0; i < length; i++) { - positions[i] = offset + i; - } - offset += length; - Block sub = block.filter(positions); - expected.add(extractAndFlattenBlockValues(sub)); - builder.appendAllValuesToCurrentPosition(sub); - sub.close(); - } - block.close(); - block = builder.build(); - assertThat(block.getPositionCount(), 
equalTo(expected.size())); - for (int i = 0; i < block.getPositionCount(); i++) { - assertThat(BlockUtils.toJavaObject(block, i), equalTo(expected.get(i))); - } - } - return block; - } - - static Object extractAndFlattenBlockValues(Block block) { - List values = new ArrayList<>(); - for (int i = 0; i < block.getPositionCount(); i++) { - Object v = BlockUtils.toJavaObject(block, i); - if (v == null) { - continue; - } - if (v instanceof List l) { - values.addAll(l); - } else { - values.add(v); - } - } - if (values.isEmpty()) { - return null; - } else if (values.size() == 1) { - return values.get(0); - } else { - return values; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index a2b074c1403a0..4595b26ca27aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -113,12 +113,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public IntBlock build() { return builder.build(); @@ -174,12 +168,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public LongBlock build() { return builder.build(); @@ -235,12 +223,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public DoubleBlock build() { return builder.build(); @@ -296,12 +278,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BytesRefBlock build() { return builder.build(); @@ -360,12 +336,6 @@ public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } - @Override - public Block.Builder appendAllValuesToCurrentPosition(Block block) { - builder.appendAllValuesToCurrentPosition(block); - return this; - } - @Override public BooleanBlock build() { return builder.build(); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java new file mode 100644 index 0000000000000..0427afb6d80c8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Booleans. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBoolean extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + BooleanBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getBoolean(firstValueIndex + v); + } + } + } + + private boolean[] extendCell(boolean[] oldCell, int newValueCount) { + if (oldCell == null) { + return new boolean[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBoolean(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java new file mode 100644 index 0000000000000..ff881da5baf44 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for BytesRefs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForBytesRef extends EnrichResultBuilder { + private final BytesRefArray bytes; // shared between all cells + private ObjectArray cells; + + EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } + } + + @Override + void addInputPage(IntVector positions, Page page) { + BytesRefBlock block = page.getBlock(channel); + BytesRef scratch = new BytesRef(); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(totalPositions)) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendBytesRef(bytes.get(v, scratch)); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(bytes, cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java new file mode 100644 index 0000000000000..93c178d816326 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Doubles. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForDouble extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + DoubleBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getDouble(firstValueIndex + v); + } + } + } + + private double[] extendCell(double[] oldCell, int newValueCount) { + if (oldCell == null) { + return new double[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendDouble(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java new file mode 100644 index 0000000000000..4dec877e0d1e4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Ints. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForInt extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForInt(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + IntBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getInt(firstValueIndex + v); + } + } + } + + private int[] extendCell(int[] oldCell, int newValueCount) { + if (oldCell == null) { + return new int[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendInt(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java new file mode 100644 index 0000000000000..0dd4d1d0a8a0d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for Longs. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderForLong extends EnrichResultBuilder { + private ObjectArray cells; + + EnrichResultBuilderForLong(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + LongBlock block = page.getBlock(channel); + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.getLong(firstValueIndex + v); + } + } + } + + private long[] extendCell(long[] oldCell, int newValueCount) { + if (oldCell == null) { + return new long[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(totalPositions)) { + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { + builder.appendLong(v); + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close(cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 77120c757e97a..e5d4e58d9d61b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -37,7 +37,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; -import org.elasticsearch.compute.operator.ProjectOperator; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasables; @@ -318,22 +317,10 @@ private void doLookup( 0 ) ); - - // drop docs block - intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); - boolean singleLeaf = searchContext.searcher().getLeafContexts().size() == 1; - // merging field-values by position - final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); + final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); intermediateOperators.add( - new MergePositionsOperator( - singleLeaf, - inputPage.getPositionCount(), - 0, - mergingChannels, - mergingTypes, - driverContext.blockFactory() - ) + new MergePositionsOperator(inputPage.getPositionCount(), 1, mergingChannels, mergingTypes, driverContext.blockFactory()) ); AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); @@ -392,17 +379,6 @@ private Page createNullResponse(int positionCount, List extract } } - private static Operator droppingBlockOperator(int totalBlocks, int droppingPosition) { - var size = totalBlocks - 1; - var projection = new ArrayList(size); - for (int i = 0; i < totalBlocks; i++) { - if (i != droppingPosition) { - projection.add(i); - } - } - return new ProjectOperator(projection); - } - private class TransportHandler implements TransportRequestHandler { @Override public void messageReceived(LookupRequest request, TransportChannel channel, Task task) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java new file mode 100644 index 
0000000000000..5bb42f3090695 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; + +/** + * An abstract class responsible for collecting values for an output block of enrich. + * The incoming values of the same position are combined and added to a single corresponding position. + */ +abstract class EnrichResultBuilder implements Releasable { + protected final BlockFactory blockFactory; + protected final int channel; + protected final int totalPositions; + private long usedBytes; + + EnrichResultBuilder(BlockFactory blockFactory, int channel, int totalPositions) { + this.blockFactory = blockFactory; + this.channel = channel; + this.totalPositions = totalPositions; + } + + /** + * Collects the input values from the input page. + * + * @param positions the positions vector + * @param page the input page. The block located at {@code channel} is the value block + */ + abstract void addInputPage(IntVector positions, Page page); + + abstract Block build(); + + final void adjustBreaker(long bytes) { + blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "<>"); + usedBytes += bytes; + } + + @Override + public void close() { + blockFactory.breaker().addWithoutBreaking(-usedBytes); + } + + static EnrichResultBuilder enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel, int totalPositions) { + return switch (elementType) { + case NULL -> new EnrichResultBuilderForNull(blockFactory, channel, totalPositions); + case INT -> new EnrichResultBuilderForInt(blockFactory, channel, totalPositions); + case LONG -> new EnrichResultBuilderForLong(blockFactory, channel, totalPositions); + case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel, totalPositions); + case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel, totalPositions); + case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel, totalPositions); + default -> throw new IllegalArgumentException("no enrich result builder for [" + elementType + "]"); + }; + } + + private static class EnrichResultBuilderForNull extends EnrichResultBuilder { + EnrichResultBuilderForNull(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + } + + @Override + void addInputPage(IntVector positions, Page page) { + assert page.getBlock(channel).areAllValuesNull() : "expected all nulls; but got values"; + } + + @Override + Block build() { + return blockFactory.newConstantNullBlock(totalPositions); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index 89447807db5b9..a3b7a8be61e2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import java.util.Arrays; +import java.util.Objects; /** * Combines values at the given blocks with the same positions into a single position for the blocks at the given channels @@ -44,21 +45,13 @@ */ final class MergePositionsOperator implements Operator { private boolean finished = false; - private int filledPositions = 0; - private final boolean singleMode; - private final int positionCount; private final int positionChannel; - private final Block.Builder[] outputBuilders; - private final int[] mergingChannels; - private final ElementType[] mergingTypes; - private PositionBuilder positionBuilder = null; + private final EnrichResultBuilder[] builders; private Page outputPage; - private final BlockFactory blockFactory; MergePositionsOperator( - boolean singleMode, int positionCount, int positionChannel, int[] mergingChannels, @@ -73,123 +66,51 @@ final class MergePositionsOperator implements Operator { + Arrays.toString(mergingTypes) ); } - this.blockFactory = blockFactory; - this.singleMode = singleMode; - this.positionCount = positionCount; this.positionChannel = positionChannel; - this.mergingChannels = mergingChannels; - this.mergingTypes = mergingTypes; - this.outputBuilders = new Block.Builder[mergingTypes.length]; + this.builders = new EnrichResultBuilder[mergingTypes.length]; try { for (int i = 0; i < mergingTypes.length; i++) { - outputBuilders[i] = mergingTypes[i].newBlockBuilder(positionCount, blockFactory); + builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i], positionCount); } } finally { - if (outputBuilders[outputBuilders.length - 1] == null) { - Releasables.close(outputBuilders); + if (builders[builders.length - 1] == null) { + Releasables.close(builders); } } } @Override public boolean needsInput() { - return true; + return finished == false; } @Override public void addInput(Page page) { try { final IntBlock positions = page.getBlock(positionChannel); - final int currentPosition = positions.getInt(0); - if (singleMode) { - fillNullUpToPosition(currentPosition); - for (int i = 0; i < mergingChannels.length; i++) { - int channel = mergingChannels[i]; - outputBuilders[i].appendAllValuesToCurrentPosition(page.getBlock(channel)); - } - filledPositions++; - } else { - if (positionBuilder != null && positionBuilder.position != currentPosition) { - flushPositionBuilder(); - } - if (positionBuilder == null) { - positionBuilder = new PositionBuilder(currentPosition, mergingTypes, blockFactory); - } - positionBuilder.combine(page, mergingChannels); + final IntVector positionsVector = Objects.requireNonNull(positions.asVector(), "positions must be a vector"); + for (EnrichResultBuilder builder : builders) { + builder.addInputPage(positionsVector, page); } } finally { Releasables.closeExpectNoException(page::releaseBlocks); } } - static final class PositionBuilder implements Releasable { - private final int position; - private final Block.Builder[] builders; - - PositionBuilder(int position, ElementType[] elementTypes, BlockFactory 
blockFactory) { - this.position = position; - this.builders = new Block.Builder[elementTypes.length]; - try { - for (int i = 0; i < builders.length; i++) { - builders[i] = elementTypes[i].newBlockBuilder(1, blockFactory); - } - } finally { - if (builders[builders.length - 1] == null) { - Releasables.close(builders); - } - } - } - - void combine(Page page, int[] channels) { - for (int i = 0; i < channels.length; i++) { - Block block = page.getBlock(channels[i]); - builders[i].appendAllValuesToCurrentPosition(block); - } - } - - void buildTo(Block.Builder[] output) { - for (int i = 0; i < output.length; i++) { - try (var b = builders[i]; Block block = b.build()) { - output[i].appendAllValuesToCurrentPosition(block); - } + @Override + public void finish() { + final Block[] blocks = new Block[builders.length]; + try { + for (int i = 0; i < builders.length; i++) { + blocks[i] = builders[i].build(); } - } - - @Override - public void close() { - Releasables.close(builders); - } - } - - private void flushPositionBuilder() { - fillNullUpToPosition(positionBuilder.position); - filledPositions++; - try (var p = positionBuilder) { - p.buildTo(outputBuilders); + outputPage = new Page(blocks); } finally { - positionBuilder = null; - } - } - - private void fillNullUpToPosition(int position) { - while (filledPositions < position) { - for (Block.Builder builder : outputBuilders) { - builder.appendNull(); + finished = true; + if (outputPage == null) { + Releasables.close(blocks); } - filledPositions++; - } - } - - @Override - public void finish() { - if (positionBuilder != null) { - flushPositionBuilder(); } - fillNullUpToPosition(positionCount); - final Block[] blocks = Block.Builder.buildAll(outputBuilders); - outputPage = new Page(blocks); - assert outputPage.getPositionCount() == positionCount; - finished = true; } @Override @@ -206,7 +127,7 @@ public Page getOutput() { @Override public void close() { - Releasables.close(Releasables.wrap(outputBuilders), positionBuilder, () -> { + Releasables.close(Releasables.wrap(builders), () -> { if (outputPage != null) { outputPage.releaseBlocks(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st new file mode 100644 index 0000000000000..4c5c9fabfa797 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BytesRefArray; +$else$ +import org.apache.lucene.util.RamUsageEstimator; +$endif$ +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(long)$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.$Type$Block; +$else$ +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntVector; +$endif$ +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; + +import java.util.Arrays; + +/** + * {@link EnrichResultBuilder} for $Type$s. + * This class is generated. Edit `X-EnrichResultBuilder.java.st` instead. + */ +final class EnrichResultBuilderFor$Type$ extends EnrichResultBuilder { +$if(BytesRef)$ + private final BytesRefArray bytes; // shared between all cells +$endif$ + private ObjectArray<$if(BytesRef)$int$else$$type$$endif$[]> cells; + + EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel, int totalPositions) { + super(blockFactory, channel, totalPositions); + this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); +$if(BytesRef)$ + BytesRefArray bytes = null; + try { + bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + this.bytes = bytes; + } finally { + if (bytes == null) { + this.cells.close(); + } + } +$endif$ + } + + @Override + void addInputPage(IntVector positions, Page page) { + $Type$Block block = page.getBlock(channel); +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < positions.getPositionCount(); i++) { + int valueCount = block.getValueCount(i); + if (valueCount == 0) { + continue; + } + int cellPosition = positions.getInt(i); + final var oldCell = cells.get(cellPosition); + final var newCell = extendCell(oldCell, valueCount); + cells.set(cellPosition, newCell); + int dstIndex = oldCell != null ? oldCell.length : 0; + adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? 
RamUsageEstimator.sizeOf(oldCell) : 0)); + int firstValueIndex = block.getFirstValueIndex(i); +$if(BytesRef)$ + int bytesOrd = Math.toIntExact(bytes.size()); + for (int v = 0; v < valueCount; v++) { + scratch = block.getBytesRef(firstValueIndex + v, scratch); + bytes.append(scratch); + newCell[dstIndex + v] = bytesOrd + v; + } +$else$ + for (int v = 0; v < valueCount; v++) { + newCell[dstIndex + v] = block.get$Type$(firstValueIndex + v); + } +$endif$ + } + } + + private $if(BytesRef)$int$else$$type$$endif$[] extendCell($if(BytesRef)$int$else$$type$$endif$[] oldCell, int newValueCount) { + if (oldCell == null) { + return new $if(BytesRef)$int$else$$type$$endif$[newValueCount]; + } else { + return Arrays.copyOf(oldCell, oldCell.length + newValueCount); + } + } + + @Override + Block build() { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(totalPositions)) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int i = 0; i < totalPositions; i++) { + final var cell = cells.get(i); + if (cell == null) { + builder.appendNull(); + continue; + } + if (cell.length > 1) { + builder.beginPositionEntry(); + } + // TODO: sort and dedup + for (var v : cell) { +$if(BytesRef)$ + builder.appendBytesRef(bytes.get(v, scratch)); +$else$ + builder.append$Type$(v); +$endif$ + } + if (cell.length > 1) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + @Override + public void close() { + Releasables.close($if(BytesRef)$bytes, $endif$cells, super::close); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java new file mode 100644 index 0000000000000..f6e8b9107504c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.enrich; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class EnrichResultBuilderTests extends ESTestCase { + + public void testBytesRef() { + BlockFactory blockFactory = blockFactory(); + Map> expectedValues = new HashMap<>(); + int numPages = between(0, 10); + int maxPosition = between(0, 100); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0, maxPosition + 1); + for (int i = 0; i < numPages; i++) { + int numRows = between(1, 100); + try ( + var positionsBuilder = blockFactory.newIntVectorBuilder(numRows); + var valuesBuilder = blockFactory.newBytesRefBlockBuilder(numRows) + ) { + for (int r = 0; r < numRows; r++) { + int position = between(0, maxPosition); + positionsBuilder.appendInt(position); + int numValues = between(0, 3); + if (numValues == 0) { + valuesBuilder.appendNull(); + } + if (numValues > 1) { + valuesBuilder.beginPositionEntry(); + } + for (int v = 0; v < numValues; v++) { + BytesRef val = new BytesRef(randomByteArrayOfLength(10)); + expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); + valuesBuilder.appendBytesRef(val); + } + if (numValues > 1) { + valuesBuilder.endPositionEntry(); + } + } + try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) { + resultBuilder.addInputPage(positions, new Page(valuesBlock)); + } + } + } + try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build()) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v))); + } + } + } + } + resultBuilder.close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testLong() { + BlockFactory blockFactory = blockFactory(); + Map> expectedValues = new HashMap<>(); + int numPages = between(0, 10); + int maxPosition = between(0, 100); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0, maxPosition + 1); + for (int i = 0; i < numPages; i++) { + int numRows = between(1, 100); + try ( + var positionsBuilder = blockFactory.newIntVectorBuilder(numRows); + var valuesBuilder = blockFactory.newLongBlockBuilder(numRows) + ) { + for (int r = 0; r < numRows; r++) { + int position = between(0, maxPosition); + positionsBuilder.appendInt(position); + int numValues = between(0, 3); + if (numValues == 0) { + valuesBuilder.appendNull(); + } + if (numValues > 1) { + 
valuesBuilder.beginPositionEntry();
+                    }
+                    for (int v = 0; v < numValues; v++) {
+                        long val = randomLong();
+                        expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val);
+                        valuesBuilder.appendLong(val);
+                    }
+                    if (numValues > 1) {
+                        valuesBuilder.endPositionEntry();
+                    }
+                }
+                try (var positions = positionsBuilder.build(); var valuesBlock = valuesBuilder.build()) {
+                    resultBuilder.addInputPage(positions, new Page(valuesBlock));
+                }
+            }
+        }
+        try (LongBlock actualOutput = (LongBlock) resultBuilder.build()) {
+            assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1));
+            for (int i = 0; i < actualOutput.getPositionCount(); i++) {
+                List<Long> values = expectedValues.get(i);
+                if (actualOutput.isNull(i)) {
+                    assertNull(values);
+                } else {
+                    int valueCount = actualOutput.getValueCount(i);
+                    int first = actualOutput.getFirstValueIndex(i);
+                    assertThat(valueCount, equalTo(values.size()));
+                    for (int v = 0; v < valueCount; v++) {
+                        assertThat(actualOutput.getLong(first + v), equalTo(values.get(v)));
+                    }
+                }
+            }
+        }
+        resultBuilder.close();
+        assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
+    }
+
+    BlockFactory blockFactory() {
+        var bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(100)).withCircuitBreaking();
+        CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
+        return new BlockFactory(breaker, bigArrays);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
index 80d127fc81907..09bc36a5390af 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java
@@ -32,7 +32,6 @@ public void testSimple() throws Exception {
         CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
         BlockFactory blockFactory = new BlockFactory(breaker, bigArrays);
         MergePositionsOperator mergeOperator = new MergePositionsOperator(
-            randomBoolean(),
             7,
             0,
             new int[] { 1, 2 },

From 75228dfd4517a28ef7b72461117f440e440ff37c Mon Sep 17 00:00:00 2001
From: Parker Timmins
Date: Tue, 9 Apr 2024 11:11:49 -0600
Subject: [PATCH 112/173] Add granular error list to alias action response
 (#106514)

When an alias action list is posted with must_exist==false, and succeeds
only partially, a list of results for each action is now returned. The
results contain information about the requested action, indices, and
aliases. If must_exist==true, or all actions fail, the call will return
a 400 status along with the associated exception.
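The status policy this describes, restated as a sketch; the class and method below are illustrative only and appear nowhere in the patch:

[source,java]
----
final class AliasActionResponsePolicy {
    // must_exist==true, or a total failure, answers 400 with the associated
    // exception; a partial failure answers 200 with per-action results.
    static boolean returnsPerActionResults(boolean mustExist, int failedActions, int totalActions) {
        if (mustExist) {
            return false; // any failure surfaces as a 400 with the exception
        }
        if (failedActions == 0 || failedActions == totalActions) {
            return false; // complete success, or complete failure (a 400)
        }
        return true; // partial success: granular results in the response body
    }
}
----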
---
 docs/changelog/106514.yaml                    |   6 +
 docs/reference/alias.asciidoc                 |  71 +++++
 docs/reference/indices/aliases.asciidoc       |  58 ++++-
 .../data_stream/140_data_stream_aliases.yml   |  83 ++++++
 .../indices.update_aliases/40_must_exist.yml  |  97 +++++++
 .../org/elasticsearch/TransportVersions.java  |   1 +
 ...dicesAliasesClusterStateUpdateRequest.java |  10 +-
 .../indices/alias/IndicesAliasesRequest.java  |   5 +-
 .../alias/IndicesAliasesRequestBuilder.java   |   3 +-
 .../indices/alias/IndicesAliasesResponse.java | 245 ++++++++++++++++++
 .../alias/TransportIndicesAliasesAction.java  |  32 ++-
 .../client/internal/IndicesAdminClient.java   |   5 +-
 .../internal/support/AbstractClient.java      |   5 +-
 .../cluster/metadata/AliasAction.java         |   4 +-
 .../metadata/MetadataIndexAliasesService.java |  15 +-
 .../alias/IndicesAliasesResponseTests.java    | 108 ++++++++
 .../MetadataIndexAliasesServiceTests.java     |  16 +-
 .../core/ml/annotations/AnnotationIndex.java  |   6 +-
 .../xpack/core/ml/utils/MlIndexAndAlias.java  |   3 +-
 .../core/ilm/ShrinkSetAliasStepTests.java     |   8 +-
 .../core/ml/utils/MlIndexAndAliasTests.java   |  11 +-
 .../search/SearchApplicationIndexService.java |  11 +-
 .../xpack/ml/MlInitializationService.java     |   5 +-
 .../ml/job/persistence/JobDataDeleter.java    |   9 +-
 .../job/persistence/JobResultsProvider.java   |   3 +-
 .../TransformClusterStateListener.java        |   4 +-
 26 files changed, 766 insertions(+), 58 deletions(-)
 create mode 100644 docs/changelog/106514.yaml
 create mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java
 create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java

diff --git a/docs/changelog/106514.yaml b/docs/changelog/106514.yaml
new file mode 100644
index 0000000000000..5b25f40db2742
--- /dev/null
+++ b/docs/changelog/106514.yaml
@@ -0,0 +1,6 @@
+pr: 106514
+summary: Add granular error list to alias action response
+area: Indices APIs
+type: feature
+issues:
+  - 94478
diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc
index 6ddd3602e1467..5b30501ed7c9d 100644
--- a/docs/reference/alias.asciidoc
+++ b/docs/reference/alias.asciidoc
@@ -121,6 +121,77 @@ POST _aliases
 // TEST[s/^/PUT _data_stream\/logs-nginx.access-prod\nPUT _data_stream\/logs-my_app-default\n/]
 // end::alias-multiple-actions-example[]
+[discrete]
+[[multiple-action-results]]
+=== Multiple action results
+
+When using multiple actions, if some succeed and some fail, a list of per-action results will be returned.
+
+Consider a similar action list to the previous example, but now with an alias `logs-non-existing`, which does not yet exist.
+In this case, the `remove` action will fail, but the `add` action will succeed.
+The response will contain the list `action_results`, with a result for every requested action.
+
+[source,console]
+----
+POST _aliases
+{
+  "actions": [
+    {
+      "remove": {
+        "index": "index1",
+        "alias": "logs-non-existing"
+      }
+    },
+    {
+      "add": {
+        "index": "index2",
+        "alias": "logs-non-existing"
+      }
+    }
+  ]
+}
+----
+// TEST[s/^/PUT \/index1\nPUT \/index2\n/]
+
+The API returns the following result:
+
+[source,console-result]
+--------------------------------------------------
+{
+  "acknowledged": true,
+  "errors": true,
+  "action_results": [
+    {
+      "action": {
+        "type": "remove",
+        "indices": [ "index1" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 404,
+      "error": {
+        "type": "aliases_not_found_exception",
+        "reason": "aliases [logs-non-existing] missing",
+        "resource.type": "aliases",
+        "resource.id": "logs-non-existing"
+      }
+    },
+    {
+      "action": {
+        "type": "add",
+        "indices": [ "index2" ],
+        "aliases": [ "logs-non-existing" ]
+      },
+      "status": 200
+    }
+  ]
+}
+--------------------------------------------------
+
+Allowing the action list to succeed partially may not provide the desired result.
+It may be more appropriate to set `must_exist` to `true`, which will cause the entire action
+list to fail if a single action fails.
+
+
 [discrete]
 [[add-alias-at-creation]]
 === Add an alias at index creation
diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc
index 76698501fd416..34248cc5f98d3 100644
--- a/docs/reference/indices/aliases.asciidoc
+++ b/docs/reference/indices/aliases.asciidoc
@@ -145,10 +145,16 @@ the alias points to one data stream.
 +
 Only the `add` action supports this parameter.
+// tag::alias-options[]
 `must_exist`::
 (Optional, Boolean)
-If `true`, the alias must exist to perform the action. Defaults to `false`. Only
-the `remove` action supports this parameter.
+Affects the behavior when attempting to remove an alias which does not exist.
+If `true`, removing an alias which does not exist will cause all actions to fail.
+If `false`, removing an alias which does not exist will only cause that removal to fail.
+Defaults to `false`.
+// end::alias-options[]
++
+Only the `remove` action supports this parameter.

 // tag::alias-options[]
 `routing`::
@@ -168,3 +174,51 @@ stream aliases don't support this parameter.
 Only the `add` action supports this parameter.
 =====
 ====
+
+
+
+[role="child_attributes"]
+[[indices-aliases-api-response-body]]
+==== {api-response-body-title}
+
+`acknowledged`::
+(Boolean)
+If `true`, the request received a response from the master node within the
+`timeout` period.
+
+`errors`::
+(Boolean)
+If `true`, at least one of the requested actions failed.
+
+`action_results`::
+(Optional, array of objects) Results for each requested action.
++
+.Properties of `action_results` objects
+[%collapsible%open]
+====
+
+`action`::
+(object)
+Description of the associated action request.
++
+.Properties of `action` object
+[%collapsible%open]
+=====
+`type`::
+(string) The type of the associated action, one of `add`, `remove`, or `remove_index`.
+
+`indices`::
+(array of strings) List of indices in the associated action.
+
+`aliases`::
+(array of strings) List of aliases in the associated action.
+=====
+
+`status`::
+(integer) HTTP status code returned for the action.
+
+`error`::
+(Optional, object) Contains additional information about the failed action.
++
+Only present if the action failed.
+==== diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index 70c563d1d4510..1050d6e01a95f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -307,3 +307,86 @@ indices.get_alias: name: this-does-not-exist* - is_false: ds-first.aliases.my-alias +--- +"Action Results with multiple matching aliases": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-foobar + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-foobar + aliases: test_alias1 + - remove: + index: log-foobar + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200 } + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-foobar'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['log-foobar'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"Single action result per action": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + features: allowed_warnings + - do: + allowed_warnings: + - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [ log-* ] + template: + settings: + index.number_of_replicas: 0 + data_stream: { } + - do: + indices.create_data_stream: + name: log-test-1 + - do: + indices.create_data_stream: + name: log-test-2 + - is_true: acknowledged + - do: + indices.update_aliases: + body: + actions: + - add: + index: log-test-* + aliases: test_alias1 + - remove: + index: log-test-* + aliases: test_non_existing + must_exist: false + - is_true: errors + - length: { action_results: 2 } + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['log-test-1', 'log-test-2'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml index dbe167608e576..fa3c740612872 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml @@ -82,3 +82,100 @@ - remove_index: index: test_index must_exist: true +--- +"Partial success with must_exist == false": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - remove: + index: test_index + aliases: test_non_existing + must_exist: false + - is_true: errors + - match: { action_results.0.status: 200 } + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404 } + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"Partial success with must_exist == null (default)": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - remove: + index: test_index + aliases: test_non_existing + - is_true: errors + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404} + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } +--- +"No action_results field if all actions successful": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index + aliases: test_alias1 + - is_false: errors + - match: { action_results: null } +--- +"Single result per input action": + - skip: + version: " - 8.13.99" + reason: "alias action results do not work until 8.14" + - do: + indices.create: + index: test_index1 + - do: + indices.create: + index: test_index2 + - do: + indices.update_aliases: + body: + actions: + - add: + index: test_index* + aliases: test_alias1 + - remove: + index: test_index* + aliases: test_non_existing + - length: { action_results: 2 } + - is_true: errors + - match: { action_results.0.status: 200} + - match: { action_results.0.action: { 'type': 'add', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_alias1'] } } + - match: { action_results.0.error: null } + - match: { action_results.1.status: 404} + - match: { action_results.1.action: { 'type': 'remove', 'indices': ['test_index1', 'test_index2'], 'aliases': ['test_non_existing'] } } + - match: { action_results.1.error.type: aliases_not_found_exception } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4a1bf691ea1b0..e05487c9c88fe 100644 --- 
a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -164,6 +164,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0);
     public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0);
     public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0);
+    public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0);

     /*
      * STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java
index b52098a49c002..1f87cf618dfcf 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesClusterStateUpdateRequest.java
@@ -7,6 +7,7 @@
  */
 package org.elasticsearch.action.admin.indices.alias;

+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest;
 import org.elasticsearch.cluster.metadata.AliasAction;

@@ -18,8 +19,11 @@ public class IndicesAliasesClusterStateUpdateRequest extends ClusterStateUpdateRequest {
     private final List<AliasAction> actions;

-    public IndicesAliasesClusterStateUpdateRequest(List<AliasAction> actions) {
+    private final List<AliasActionResult> actionResults;
+
+    public IndicesAliasesClusterStateUpdateRequest(List<AliasAction> actions, List<AliasActionResult> actionResults) {
         this.actions = actions;
+        this.actionResults = actionResults;
     }

     /**
@@ -28,4 +32,8 @@ public IndicesAliasesClusterStateUpdateRequest(List<AliasAction> actions) {
     public List<AliasAction> actions() {
         return actions;
     }
+
+    public List<AliasActionResult> getActionResults() {
+        return actionResults;
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java
index a4f5ee9eb672b..fac2006b68814 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java
@@ -83,7 +83,6 @@ public static class AliasActions implements AliasesRequest, Writeable, ToXConten
         private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index");
         private static final ParseField IS_HIDDEN = new ParseField("is_hidden");
         private static final ParseField MUST_EXIST = new ParseField("must_exist");
-
         private static final ParseField ADD = new ParseField("add");
         private static final ParseField REMOVE = new ParseField("remove");
         private static final ParseField REMOVE_INDEX = new ParseField("remove_index");
@@ -105,6 +104,10 @@ public byte value() {
             return value;
         }

+        public String getFieldName() {
+            return fieldName;
+        }
+
         public static Type fromValue(byte value) {
             return switch (value) {
                 case 0 -> ADD;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java
index 4e49a5fe8d400..1462e36ea7895 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java
+++
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.index.query.QueryBuilder; @@ -21,7 +20,7 @@ */ public class IndicesAliasesRequestBuilder extends AcknowledgedRequestBuilder< IndicesAliasesRequest, - AcknowledgedResponse, + IndicesAliasesResponse, IndicesAliasesRequestBuilder> { public IndicesAliasesRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java new file mode 100644 index 0000000000000..b4f483e6f8161 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponse.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Response with error information for a request to add/remove aliases for one or more indices. + * Contains an acknowledged boolean, an errors boolean, and a list of results. + * The result list is only present if there are errors, and contains a result for every input action. + * This response replaces AcknowledgedResponse, and knows how to de/serialize from/to AcknowledgedResponse + * in case of mixed version clusters. 
+ */
+public class IndicesAliasesResponse extends AcknowledgedResponse {
+
+    // Response without any error information, analogous to AcknowledgedResponse.FALSE
+    public static final IndicesAliasesResponse NOT_ACKNOWLEDGED = new IndicesAliasesResponse(false, false, List.of());
+
+    // Response without any error information, analogous to AcknowledgedResponse.TRUE
+    public static final IndicesAliasesResponse ACKNOWLEDGED_NO_ERRORS = new IndicesAliasesResponse(true, false, List.of());
+
+    private static final String ACTION_RESULTS_FIELD = "action_results";
+    private static final String ERRORS_FIELD = "errors";
+
+    private final List<AliasActionResult> actionResults;
+    private final boolean errors;
+
+    protected IndicesAliasesResponse(StreamInput in) throws IOException {
+        super(in);
+
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            this.errors = in.readBoolean();
+            this.actionResults = in.readCollectionAsImmutableList(AliasActionResult::new);
+        } else {
+            this.errors = false;
+            this.actionResults = List.of();
+        }
+    }
+
+    /**
+     * @param acknowledged whether the update was acknowledged by all the relevant nodes in the cluster
+     * @param errors true if any of the requested actions failed
+     * @param actionResults the list of results for each input action, only present if there are errors
+     */
+    IndicesAliasesResponse(boolean acknowledged, boolean errors, final List<AliasActionResult> actionResults) {
+        super(acknowledged);
+        this.errors = errors;
+        this.actionResults = actionResults;
+    }
+
+    public List<AliasActionResult> getActionResults() {
+        return actionResults;
+    }
+
+    public boolean hasErrors() {
+        return errors;
+    }
+
+    /**
+     * Build a response from a list of action results. Sets the errors boolean based
+     * on whether any of the individual results contains an error.
+     * @param actionResults an action result for each of the requested alias actions
+     * @return response containing all action results
+     */
+    public static IndicesAliasesResponse build(final List<AliasActionResult> actionResults) {
+        assert actionResults.isEmpty() == false : "IndicesAliasesResponse must be instantiated with at least one action result.";
+        final boolean errors = actionResults.stream().anyMatch(a -> a.error != null);
+        return new IndicesAliasesResponse(true, errors, actionResults);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ALIAS_ACTION_RESULTS)) {
+            out.writeBoolean(errors);
+            out.writeCollection(actionResults);
+        }
+    }
+
+    @Override
+    protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
+        builder.field(ERRORS_FIELD, errors);
+        // if there are no errors, don't provide granular list of results
+        if (errors) {
+            builder.field(ACTION_RESULTS_FIELD, actionResults);
+        }
+    }
+
+    @Override
+    // Only used equals in tests
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        if (super.equals(o) == false) return false;
+        IndicesAliasesResponse response = (IndicesAliasesResponse) o;
+        return errors == response.errors && Objects.equals(actionResults, response.actionResults);
+    }
+
+    @Override
+    // Only used hashCode in tests
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), actionResults, errors);
+    }
+
+    /**
+     * Result for a single alias add/remove action
+     */
+    public static class AliasActionResult implements Writeable, ToXContentObject {
+
+        /**
+         * Resolved indices to which the action applies.
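+         * (For example, the request may name a wildcard pattern while this field lists the concrete indices it expanded to.)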
+         * This duplicates information which exists in the action, but is included
+         * because the action indices may or may not be resolved, depending on
+         * whether the security layer is used.
+         */
+        private final List<String> indices;
+        private final AliasActions action;
+        private final ElasticsearchException error;
+
+        /**
+         * Build a result that could be either a success or failure
+         * @param indices the resolved indices to which the associated action applies
+         * @param action the alias action consisting of add/remove, aliases, and indices
+         * @param numAliasesRemoved the number of aliases removed, if any
+         * @return the action result
+         */
+        public static AliasActionResult build(List<String> indices, AliasActions action, int numAliasesRemoved) {
+            if (action.actionType() == AliasActions.Type.REMOVE && numAliasesRemoved == 0) {
+                return buildRemoveError(indices, action);
+            }
+            return buildSuccess(indices, action);
+        }
+
+        /**
+         * Build an error result for a failed remove action.
+         */
+        private static AliasActionResult buildRemoveError(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, new AliasesNotFoundException(action.getOriginalAliases()));
+        }
+
+        /**
+         * Build a success action result with no errors.
+         */
+        public static AliasActionResult buildSuccess(List<String> indices, AliasActions action) {
+            return new AliasActionResult(indices, action, null);
+        }
+
+        private int getStatus() {
+            return error == null ? 200 : error.status().getStatus();
+        }
+
+        private AliasActionResult(List<String> indices, AliasActions action, ElasticsearchException error) {
+            assert indices.isEmpty() == false : "Alias action result must be instantiated with at least one index";
+            this.indices = indices;
+            this.action = action;
+            this.error = error;
+        }
+
+        private AliasActionResult(StreamInput in) throws IOException {
+            this.indices = in.readStringCollectionAsList();
+            this.action = new AliasActions(in);
+            this.error = in.readException();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeStringCollection(indices);
+            action.writeTo(out);
+            out.writeException(error);
+        }
+
+        public static final String ACTION_FIELD = "action";
+        public static final String ACTION_TYPE_FIELD = "type";
+        public static final String ACTION_INDICES_FIELD = "indices";
+        public static final String ACTION_ALIASES_FIELD = "aliases";
+        public static final String STATUS_FIELD = "status";
+        public static final String ERROR_FIELD = "error";
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+
+            // include subset of fields from action request
+            builder.field(ACTION_FIELD);
+            builder.startObject();
+            builder.field(ACTION_TYPE_FIELD, action.actionType().getFieldName());
+            builder.field(ACTION_INDICES_FIELD, indices.stream().sorted().collect(Collectors.toList()));
+            builder.array(ACTION_ALIASES_FIELD, action.getOriginalAliases());
+            builder.endObject();
+
+            builder.field(STATUS_FIELD, getStatus());
+
+            if (error != null) {
+                builder.startObject(ERROR_FIELD);
+                error.toXContent(builder, params);
+                builder.endObject();
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        // Only used equals in tests
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            AliasActionResult that = (AliasActionResult) o;
+            return Objects.equals(indices, that.indices) && Objects.equals(action, that.action)
+                // ElasticsearchException does not have hashCode() so assume errors are equal iff class and message are equal
+                && Objects.equals(error == null ? null : error.getMessage(), that.error == null ? null : that.error.getMessage())
+                && Objects.equals(error == null ? null : error.getClass(), that.error == null ? null : that.error.getClass());
+        }
+
+        @Override
+        // Only used hashCode in tests
+        public int hashCode() {
+            return Objects.hash(
+                indices,
+                action,
+                // ElasticsearchException does not have hashCode() so assume errors are equal iff class and message are equal
+                error == null ? null : error.getMessage(),
+                error == null ? null : error.getClass()
+            );
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
index e56be8852e7df..2e231b398af72 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java
@@ -14,9 +14,9 @@
 import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.RequestValidators;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction;
+import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
@@ -56,10 +56,10 @@
 /**
  * Add/remove aliases action
  */
-public class TransportIndicesAliasesAction extends AcknowledgedTransportMasterNodeAction<IndicesAliasesRequest> {
+public class TransportIndicesAliasesAction extends TransportMasterNodeAction<IndicesAliasesRequest, IndicesAliasesResponse> {

     public static final String NAME = "indices:admin/aliases";
-    public static final ActionType<AcknowledgedResponse> TYPE = new ActionType<>(NAME);
+    public static final ActionType<IndicesAliasesResponse> TYPE = new ActionType<>(NAME);
     private static final Logger logger = LogManager.getLogger(TransportIndicesAliasesAction.class);

     private final MetadataIndexAliasesService indexAliasesService;
@@ -85,6 +85,7 @@ public TransportIndicesAliasesAction(
             actionFilters,
             IndicesAliasesRequest::new,
             indexNameExpressionResolver,
+            IndicesAliasesResponse::new,
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
         this.indexAliasesService = indexAliasesService;
@@ -106,15 +107,19 @@ protected void masterOperation(
         Task task,
         final IndicesAliasesRequest request,
         final ClusterState state,
-        final ActionListener<AcknowledgedResponse> listener
+        final ActionListener<IndicesAliasesResponse> listener
     ) {
         // Expand the indices names
         List<AliasActions> actions = request.aliasActions();
         List<AliasAction> finalActions = new ArrayList<>();
+        List<AliasActionResult> actionResults = new ArrayList<>();

         // Resolve all the AliasActions into AliasAction instances and gather all the aliases
         Set<String> aliases = new HashSet<>();
         for (AliasActions action : actions) {
+            int numAliasesRemoved = 0;
+            List<String> resolvedIndices = new ArrayList<>();
+
             List<String> concreteDataStreams = indexNameExpressionResolver.dataStreamNames(
                 state,
                 request.indicesOptions(),
@@ -161,18 +166,24 @@ protected void masterOperation(
                             finalActions.add(new AddDataStreamAlias(alias, dataStreamName, action.writeIndex(), action.filter()));
                         }
                     }
+
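+                    // record one result per input action; for data streams this covers every stream the action resolved to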
actionResults.add(AliasActionResult.buildSuccess(concreteDataStreams, action)); continue; } case REMOVE -> { for (String dataStreamName : concreteDataStreams) { for (String alias : concreteDataStreamAliases(action, state.metadata(), dataStreamName)) { finalActions.add(new AliasAction.RemoveDataStreamAlias(alias, dataStreamName, action.mustExist())); + numAliasesRemoved++; } } + if (nonBackingIndices.isEmpty() == false) { // Regular aliases/indices match as well with the provided expression. // (Only when adding new aliases, matching both data streams and indices is disallowed) + resolvedIndices.addAll(concreteDataStreams); } else { + actionResults.add(AliasActionResult.build(concreteDataStreams, action, numAliasesRemoved)); continue; } } @@ -224,6 +235,7 @@ protected void masterOperation( case REMOVE: for (String alias : concreteAliases(action, state.metadata(), index.getName())) { finalActions.add(new AliasAction.Remove(index.getName(), alias, action.mustExist())); + numAliasesRemoved++; } break; case REMOVE_INDEX: @@ -233,14 +245,18 @@ protected void masterOperation( throw new IllegalArgumentException("Unsupported action [" + action.actionType() + "]"); } } + + Arrays.stream(concreteIndices).map(Index::getName).forEach(resolvedIndices::add); + actionResults.add(AliasActionResult.build(resolvedIndices, action, numAliasesRemoved)); } if (finalActions.isEmpty() && false == actions.isEmpty()) { throw new AliasesNotFoundException(aliases.toArray(new String[aliases.size()])); } request.aliasActions().clear(); - IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(unmodifiableList(finalActions)) - .ackTimeout(request.timeout()) - .masterNodeTimeout(request.masterNodeTimeout()); + IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest( + unmodifiableList(finalActions), + unmodifiableList(actionResults) + ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); indexAliasesService.indicesAliases(updateRequest, listener.delegateResponse((l, e) -> { logger.debug("failed to perform aliases", e); diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index 69b897df4d76d..d38f5b0439f84 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -371,7 +372,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @return The result future */ - ActionFuture aliases(IndicesAliasesRequest request); + ActionFuture aliases(IndicesAliasesRequest request); /** * Allows to add/remove aliases from indices. 
@@ -379,7 +380,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The index aliases request * @param listener A listener to be notified with a result */ - void aliases(IndicesAliasesRequest request, ActionListener listener); + void aliases(IndicesAliasesRequest request, ActionListener listener); /** * Allows to add/remove aliases from indices. diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 76073696b0b27..26a8768a78e78 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -118,6 +118,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -1083,12 +1084,12 @@ public ThreadPool threadPool() { } @Override - public ActionFuture aliases(final IndicesAliasesRequest request) { + public ActionFuture aliases(final IndicesAliasesRequest request) { return execute(TransportIndicesAliasesAction.TYPE, request); } @Override - public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { + public void aliases(final IndicesAliasesRequest request, final ActionListener listener) { execute(TransportIndicesAliasesAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java index 63647e53619fe..533ae3a3ad50d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java @@ -8,10 +8,10 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; /** * Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}. 
@@ -189,7 +189,7 @@ boolean removeIndex() { boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) { if (false == index.getAliases().containsKey(alias)) { if (mustExist != null && mustExist) { - throw new ResourceNotFoundException("required alias [" + alias + "] does not exist"); + throw new AliasesNotFoundException(alias); } return false; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java index fb5acbdd2ac49..d9cd1a7725ca8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesService.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateAckListener; import org.elasticsearch.cluster.ClusterStateTaskExecutor; @@ -79,7 +79,10 @@ public Tuple executeTask(ApplyAliasesTask this.taskQueue = clusterService.createTaskQueue("index-aliases", Priority.URGENT, this.executor); } - public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener listener) { + public void indicesAliases( + final IndicesAliasesClusterStateUpdateRequest request, + final ActionListener listener + ) { taskQueue.submitTask("index-aliases", new ApplyAliasesTask(request, listener), null); // TODO use request.masterNodeTimeout() here? } @@ -254,7 +257,7 @@ private static void validateAliasTargetIsNotDSBackingIndex(ClusterState currentS /** * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. */ - record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) + record ApplyAliasesTask(IndicesAliasesClusterStateUpdateRequest request, ActionListener listener) implements ClusterStateTaskListener, ClusterStateAckListener { @@ -271,17 +274,17 @@ public boolean mustAck(DiscoveryNode discoveryNode) { @Override public void onAllNodesAcked() { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.build(request.getActionResults())); } @Override public void onAckFailure(Exception e) { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override public void onAckTimeout() { - listener.onResponse(AcknowledgedResponse.FALSE); + listener.onResponse(IndicesAliasesResponse.NOT_ACKNOWLEDGED); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java new file mode 100644 index 0000000000000..75a1bf8732a4f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesResponseTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.indices.alias; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.alias.RandomAliasActionsGenerator; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class IndicesAliasesResponseTests extends AbstractWireSerializingTestCase { + public void testMixedModeSerialization() throws IOException { + + // AcknowledgedResponse to IndicesAliasesResponse + // in version before TransportVersions.ALIAS_ACTION_RESULTS + { + var ack = AcknowledgedResponse.of(randomBoolean()); + try (BytesStreamOutput output = new BytesStreamOutput()) { + ack.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(TransportVersions.V_8_12_0); + + var indicesAliasesResponse = new IndicesAliasesResponse(in); + + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + assertTrue(indicesAliasesResponse.getActionResults().isEmpty()); + assertFalse(indicesAliasesResponse.hasErrors()); + } + } + } + + // IndicesAliasesResponse to AcknowledgedResponse + // out version before TransportVersions.ALIAS_ACTION_RESULTS + { + var indicesAliasesResponse = randomIndicesAliasesResponse(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(TransportVersions.V_8_12_0); + + indicesAliasesResponse.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + var ack = AcknowledgedResponse.readFrom(in); + assertEquals(ack.isAcknowledged(), indicesAliasesResponse.isAcknowledged()); + } + } + } + } + + @Override + protected Writeable.Reader instanceReader() { + return IndicesAliasesResponse::new; + } + + @Override + protected IndicesAliasesResponse createTestInstance() { + return randomIndicesAliasesResponse(); + } + + private static IndicesAliasesResponse randomIndicesAliasesResponse() { + int numActions = between(0, 5); + List results = new ArrayList<>(); + for (int i = 0; i < numActions; ++i) { + results.add(randomIndicesAliasesResult()); + } + return new IndicesAliasesResponse(randomBoolean(), randomBoolean(), results); + } + + @Override + protected IndicesAliasesResponse mutateInstance(IndicesAliasesResponse instance) throws IOException { + switch (between(0, 2)) { + case 0: { + boolean acknowledged = instance.isAcknowledged() == false; + return new IndicesAliasesResponse(acknowledged, instance.hasErrors(), instance.getActionResults()); + } + case 1: { + boolean errors = instance.hasErrors() == false; + return new IndicesAliasesResponse(instance.isAcknowledged(), errors, instance.getActionResults()); + } + default: { + var results = new ArrayList<>(instance.getActionResults()); + if (results.isEmpty()) { + results.add(randomIndicesAliasesResult()); + } else { + results.remove(between(0, results.size() - 1)); + } + return new IndicesAliasesResponse(instance.isAcknowledged(), instance.hasErrors(), results); + } + } + } + + private static 
IndicesAliasesResponse.AliasActionResult randomIndicesAliasesResult() { + var action = RandomAliasActionsGenerator.randomAliasAction(); + var indices = Arrays.asList(generateRandomStringArray(10, 5, false, false)); + return IndicesAliasesResponse.AliasActionResult.build(indices, action, randomIntBetween(0, 3)); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index 0901b1190cfc0..3f63875bfc216 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -8,8 +8,9 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse.AliasActionResult; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; @@ -19,6 +20,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -156,11 +158,11 @@ public void testMustExist() { // Show that removing non-existing alias with mustExist == true fails final ClusterState finalCS = after; - final ResourceNotFoundException iae = expectThrows( - ResourceNotFoundException.class, + final AliasesNotFoundException iae = expectThrows( + AliasesNotFoundException.class, () -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))) ); - assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist")); + assertThat(iae.getMessage(), containsString("aliases [test_2] missing")); } public void testMultipleIndices() { @@ -690,10 +692,12 @@ public void testAddAndRemoveAliasClusterStateUpdate() throws Exception { String index = randomAlphaOfLength(5); ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index); IndicesAliasesClusterStateUpdateRequest addAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Add(index, "test", null, null, null, null, null)) + List.of(new AliasAction.Add(index, "test", null, null, null, null, null)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.add().aliases("test").indices(index))) ); IndicesAliasesClusterStateUpdateRequest removeAliasRequest = new IndicesAliasesClusterStateUpdateRequest( - List.of(new AliasAction.Remove(index, "test", true)) + List.of(new AliasAction.Remove(index, "test", true)), + List.of(AliasActionResult.buildSuccess(List.of(index), AliasActions.remove().aliases("test").indices(index))) ); ClusterState after = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index 
d3a20235e3a38..07be597c7024e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -130,7 +130,9 @@ public static void createAnnotationsIndexIfNecessary( client.threadPool().getThreadContext(), ML_ORIGIN, requestBuilder.request(), - finalDelegate.delegateFailureAndWrap((l, r) -> checkMappingsListener.onResponse(r.isAcknowledged())), + finalDelegate.delegateFailureAndWrap( + (l, r) -> checkMappingsListener.onResponse(r.isAcknowledged()) + ), client.admin().indices()::aliases ); }); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 016540815fb0a..d4ec7563b868b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -295,7 +296,7 @@ private static void updateWriteAlias( client.threadPool().getThreadContext(), ML_ORIGIN, request, - listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), + listener.delegateFailureAndWrap((l, resp) -> l.onResponse(resp.isAcknowledged())), client.admin().indices()::aliases ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java index 15e1539570e28..d12cd17d957d4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import 
org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.index.IndexVersion; @@ -90,8 +90,8 @@ public void testPerformAction() throws Exception { IndicesAliasesRequest request = (IndicesAliasesRequest) invocation.getArguments()[0]; assertThat(request.getAliasActions(), equalTo(expectedAliasActions)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(AcknowledgedResponse.TRUE); + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); @@ -113,7 +113,7 @@ public void testPerformActionFailure() { Mockito.doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; listener.onFailure(exception); return null; }).when(indicesClient).aliases(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index e7dcc6b441a31..f9fdc0c8362e5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -13,11 +13,11 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; @@ -97,8 +97,8 @@ public void setUpMocks() { ); doAnswer(withResponse(new CreateIndexResponse(true, true, FIRST_CONCRETE_INDEX))).when(indicesAdminClient).create(any(), any()); when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client)); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).aliases(any(), any()); - doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).putTemplate(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).aliases(any(), any()); + doAnswer(withResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS)).when(indicesAdminClient).putTemplate(any(), any()); clusterAdminClient = mock(ClusterAdminClient.class); doAnswer(invocationOnMock -> { @@ -116,8 +116,9 @@ public void setUpMocks() { when(client.threadPool()).thenReturn(threadPool); when(client.admin()).thenReturn(adminClient); doAnswer(invocationOnMock -> { - ActionListener actionListener = (ActionListener) 
invocationOnMock.getArguments()[2]; - actionListener.onResponse(AcknowledgedResponse.TRUE); + ActionListener actionListener = (ActionListener) invocationOnMock + .getArguments()[2]; + actionListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return null; }).when(client) .execute( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 61e425d4b05dd..0ccef9acba088 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -223,7 +224,7 @@ private static String getSearchAliasName(SearchApplication app) { public void putSearchApplication(SearchApplication app, boolean create, ActionListener listener) { createOrUpdateAlias(app, new ActionListener<>() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { + public void onResponse(IndicesAliasesResponse response) { updateSearchApplication(app, create, listener); } @@ -240,7 +241,7 @@ public void onFailure(Exception e) { }); } - private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { + private void createOrUpdateAlias(SearchApplication app, ActionListener listener) { final Metadata metadata = clusterService.state().metadata(); final String searchAliasName = getSearchAliasName(app); @@ -332,14 +333,14 @@ private void removeAlias(String searchAliasName, ActionListener() { @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - listener.onResponse(AcknowledgedResponse.TRUE); + public void onResponse(IndicesAliasesResponse response) { + listener.onResponse(response); } @Override public void onFailure(Exception e) { if (e instanceof ResourceNotFoundException) { - listener.onResponse(AcknowledgedResponse.TRUE); + listener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index dab2010035b66..c849e69c780bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import 
org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -173,7 +174,7 @@ private void makeMlInternalIndicesHidden() { String[] mlHiddenIndexPatterns = MachineLearning.getMlHiddenIndexPatterns(); // Step 5: Handle errors encountered on the way. - ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { + ActionListener finalListener = ActionListener.wrap(updateAliasesResponse -> { if (updateAliasesResponse.isAcknowledged() == false) { logger.warn("One or more of the ML internal aliases could not be made hidden."); return; @@ -194,7 +195,7 @@ private void makeMlInternalIndicesHidden() { } if (indicesAliasesRequest.getAliasActions().isEmpty()) { logger.debug("There are no ML internal aliases that need to be made hidden, [{}]", getAliasesResponse.getAliases()); - finalListener.onResponse(AcknowledgedResponse.TRUE); + finalListener.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } String indicesWithNonHiddenAliasesString = indicesAliasesRequest.getAliasActions() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index 577bbe3dac6ce..b9cc1902b7ab6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -287,7 +288,7 @@ public void deleteJobDocuments( AtomicReference indexNames = new AtomicReference<>(); - final ActionListener completionHandler = ActionListener.wrap( + final ActionListener completionHandler = ActionListener.wrap( response -> finishedHandler.accept(response.isAcknowledged()), failureHandler ); @@ -295,7 +296,7 @@ public void deleteJobDocuments( // Step 9. 
If we did not drop the indices and after DBQ state done, we delete the aliases ActionListener dbqHandler = ActionListener.wrap(bulkByScrollResponse -> { if (bulkByScrollResponse == null) { // no action was taken by DBQ, assume indices were deleted - completionHandler.onResponse(AcknowledgedResponse.TRUE); + completionHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); } else { if (bulkByScrollResponse.isTimedOut()) { logger.warn("[{}] DeleteByQuery for indices [{}] timed out.", jobId, String.join(", ", indexNames.get())); @@ -469,7 +470,7 @@ private void deleteResultsByQuery( executeAsyncWithOrigin(client, ML_ORIGIN, RefreshAction.INSTANCE, refreshRequest, refreshListener); } - private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { + private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); @@ -486,7 +487,7 @@ private void deleteAliases(@SuppressWarnings("HiddenField") String jobId, Action if (removeRequest == null) { // don't error if the job's aliases have already been deleted - carry on and delete the // rest of the job's data - finishedHandler.onResponse(AcknowledgedResponse.TRUE); + finishedHandler.onResponse(IndicesAliasesResponse.ACKNOWLEDGED_NO_ERRORS); return; } executeAsyncWithOrigin( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 1abb466a20f1a..50342a7bf99e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -344,7 +345,7 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen client.threadPool().getThreadContext(), ML_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), client.admin().indices()::aliases ); }, finalListener::onFailure); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java index e2f66fe914bc2..970403e49c5a3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import 
org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -97,7 +97,7 @@ private static void createAuditAliasForDataFrameBWC(ClusterState state, Client c client.threadPool().getThreadContext(), TRANSFORM_ORIGIN, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), + ActionListener.wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure), client.admin().indices()::aliases ); } From 885256648983b5cef143f6a57f43acb4872229ce Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 13:17:15 -0400 Subject: [PATCH 113/173] Move ESQL's LOCATE test cases to cases (#107271) This moves the test cases declared in the tests for ESQL's LOCATE function into declarative test cases, which lets #106782 properly generate all of the available signatures. It also buys us all of the testing for incorrect parameter combinations. --- .../function/scalar/string/Locate.java | 4 +- .../expression/function/TestCaseSupplier.java | 2 +- .../function/scalar/string/LocateTests.java | 316 +++++++++--------- 3 files changed, 152 insertions(+), 170 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index c8b546718aabf..52d60da3f7341 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -28,8 +28,8 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** * Locate function, given a string 'a' and a substring 'b', it returns the index of the first occurrence of the substring 'b' in 'a'. */ @@ -80,7 +80,7 @@ protected TypeResolution resolveType() { return resolution; } - return start == null ? TypeResolution.TYPE_RESOLVED : isInteger(start, sourceText(), THIRD); + return start == null ? 
TypeResolution.TYPE_RESOLVED : isType(start, dt -> dt == DataTypes.INTEGER, sourceText(), THIRD, "integer"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index c064cfebd9cc5..db26624bc66bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -72,7 +72,7 @@ public TestCaseSupplier(List types, Supplier supplier) { this(nameFromTypes(types), types, supplier); } - static String nameFromTypes(List types) { + public static String nameFromTypes(List types) { return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", ")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index b95f05039630a..a7f4ca0342782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -11,22 +11,21 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.function.Function; import java.util.function.Supplier; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; /** @@ -37,192 +36,175 @@ public LocateTests(@Name("TestCase") Supplier testCas this.testCase = testCaseSupplier.get(); } + private static final DataType[] STRING_TYPES = new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }; + @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); - suppliers.add( - supplier( - "keywords", - DataTypes.KEYWORD, - DataTypes.KEYWORD, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed keyword, text", - DataTypes.KEYWORD, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "texts", - DataTypes.TEXT, - DataTypes.TEXT, - () -> randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - suppliers.add( - supplier( - "mixed text, keyword", - DataTypes.TEXT, - DataTypes.KEYWORD, - () -> 
randomRealisticUnicodeOfCodepointLength(10), - () -> randomRealisticUnicodeOfCodepointLength(2), - () -> 0 - ) - ); - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); - } - - public void testToString() { - assertThat( - evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - field("start", DataTypes.INTEGER) - ) - ).get(driverContext()).toString(), - equalTo("LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]") - ); - } - - @Override - protected Expression build(Source source, List args) { - return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); - } - - public void testPrefixString() { - assertThat(process("a tiger", "a t", 0), equalTo(1)); - assertThat(process("a tiger", "a", 0), equalTo(1)); - assertThat(process("界世", "界", 0), equalTo(1)); - } - - public void testSuffixString() { - assertThat(process("a tiger", "er", 0), equalTo(6)); - assertThat(process("a tiger", "r", 0), equalTo(7)); - assertThat(process("世界", "界", 0), equalTo(2)); - } - - public void testMidString() { - assertThat(process("a tiger", "ti", 0), equalTo(3)); - assertThat(process("a tiger", "ige", 0), equalTo(4)); - assertThat(process("世界世", "界", 0), equalTo(2)); - } - - public void testOutOfRange() { - assertThat(process("a tiger", "tigers", 0), equalTo(0)); - assertThat(process("a tiger", "ipa", 0), equalTo(0)); - assertThat(process("世界世", "\uD83C\uDF0D", 0), equalTo(0)); - } - - public void testExactString() { - assertThat(process("a tiger", "a tiger", 0), equalTo(1)); - assertThat(process("tigers", "tigers", 0), equalTo(1)); - assertThat(process("界世", "界世", 0), equalTo(1)); - } + for (DataType strType : STRING_TYPES) { + for (DataType substrType : STRING_TYPES) { + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + null, + (str, substr, start) -> 1 + str.indexOf(substr) + ) + ); + suppliers.add( + supplier( + "exact match ", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> str, + null, + (str, substr, start) -> 1 + ) + ); + suppliers.add( + supplier( + "", + strType, + substrType, + () -> randomRealisticUnicodeOfCodepointLength(10), + str -> randomRealisticUnicodeOfCodepointLength(2), + () -> between(0, 3), + (str, substr, start) -> 1 + str.indexOf(substr, start) + ) + ); + } + } - public void testSupplementaryCharacter() { + suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers)); + + // Here follows some non-randomized examples that we want to cover on every run + suppliers.add(supplier("a tiger", "a t", null, 1)); + suppliers.add(supplier("a tiger", "a", null, 1)); + suppliers.add(supplier("界世", "界", null, 1)); + suppliers.add(supplier("a tiger", "er", null, 6)); + suppliers.add(supplier("a tiger", "r", null, 7)); + suppliers.add(supplier("界世", "世", null, 2)); + suppliers.add(supplier("a tiger", "ti", null, 3)); + suppliers.add(supplier("a tiger", "ige", null, 4)); + suppliers.add(supplier("世界世", "界", null, 2)); + suppliers.add(supplier("a tiger", "tigers", null, 0)); + suppliers.add(supplier("a tiger", "ipa", null, 0)); + suppliers.add(supplier("世界世", "\uD83C\uDF0D", null, 0)); + + // Extra assertions about 4-byte characters // some assertions about the supplementary (4-byte) character we'll use for testing assert "𠜎".length() == 2; assert 
"𠜎".codePointCount(0, 2) == 1; - assert "𠜎".getBytes(UTF_8).length == 4; - - assertThat(process("a ti𠜎er", "𠜎er", 0), equalTo(5)); - assertThat(process("a ti𠜎er", "i𠜎e", 0), equalTo(4)); - assertThat(process("a ti𠜎er", "ti𠜎", 0), equalTo(3)); - assertThat(process("a ti𠜎er", "er", 0), equalTo(6)); - assertThat(process("a ti𠜎er", "r", 0), equalTo(7)); - - assertThat(process("𠜎a ti𠜎er", "𠜎er", 0), equalTo(6)); - assertThat(process("𠜎a ti𠜎er", "i𠜎e", 0), equalTo(5)); - assertThat(process("𠜎a ti𠜎er", "ti𠜎", 0), equalTo(4)); - assertThat(process("𠜎a ti𠜎er", "er", 0), equalTo(7)); - assertThat(process("𠜎a ti𠜎er", "r", 0), equalTo(8)); - - // exact - assertThat(process("a ti𠜎er", "a ti𠜎er", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎abc", " 𠜎𠜎𠜎abc", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc ", "𠜎𠜎𠜎 abc ", 0), equalTo(1)); - + assert "𠜎".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("a ti𠜎er", "𠜎er", null, 5)); + suppliers.add(supplier("a ti𠜎er", "i𠜎e", null, 4)); + suppliers.add(supplier("a ti𠜎er", "ti𠜎", null, 3)); + suppliers.add(supplier("a ti𠜎er", "er", null, 6)); + suppliers.add(supplier("a ti𠜎er", "r", null, 7)); + suppliers.add(supplier("a ti𠜎er", "a ti𠜎er", null, 1)); // prefix - assertThat(process("𠜎abc", "𠜎", 0), equalTo(1)); - assertThat(process("𠜎 abc", "𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", 0), equalTo(1)); - assertThat(process("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", 0), equalTo(1)); - assertThat(process("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", 0), equalTo(1)); - + suppliers.add(supplier("𠜎abc", "𠜎", null, 1)); + suppliers.add(supplier("𠜎 abc", "𠜎 ", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎abc", "𠜎𠜎𠜎", null, 1)); + suppliers.add(supplier("𠜎𠜎𠜎 abc", "𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier(" 𠜎𠜎𠜎 abc", " 𠜎𠜎𠜎 ", null, 1)); + suppliers.add(supplier("𠜎 𠜎 𠜎 abc", "𠜎 𠜎 𠜎 ", null, 1)); // suffix - assertThat(process("abc𠜎", "𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎", " 𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", 0), equalTo(4)); - assertThat(process("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", 0), equalTo(4)); - + suppliers.add(supplier("abc𠜎", "𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎", " 𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎", "𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc 𠜎𠜎𠜎", " 𠜎𠜎𠜎", null, 4)); + suppliers.add(supplier("abc𠜎𠜎𠜎 ", "𠜎𠜎𠜎 ", null, 4)); // out of range - assertThat(process("𠜎a ti𠜎er", "𠜎a ti𠜎ers", 0), equalTo(0)); - assertThat(process("a ti𠜎er", "aa ti𠜎er", 0), equalTo(0)); - assertThat(process("abc𠜎𠜎", "𠜎𠜎𠜎", 0), equalTo(0)); + suppliers.add(supplier("𠜎a ti𠜎er", "𠜎a ti𠜎ers", null, 0)); + suppliers.add(supplier("a ti𠜎er", "aa ti𠜎er", null, 0)); + suppliers.add(supplier("abc𠜎𠜎", "𠜎𠜎𠜎", null, 0)); assert "🐱".length() == 2 && "🐶".length() == 2; assert "🐱".codePointCount(0, 2) == 1 && "🐶".codePointCount(0, 2) == 1; - assert "🐱".getBytes(UTF_8).length == 4 && "🐶".getBytes(UTF_8).length == 4; - assertThat(process("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", 0), equalTo(1)); - assertThat(process("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0), equalTo(2)); - assertThat(process("🐱Meow!🐶Woof!", "eow!🐶Woof!", 0), equalTo(3)); + assert "🐱".getBytes(StandardCharsets.UTF_8).length == 4 && "🐶".getBytes(StandardCharsets.UTF_8).length == 4; + suppliers.add(supplier("🐱Meow!🐶Woof!", "🐱Meow!🐶Woof!", null, 1)); + suppliers.add(supplier("🐱Meow!🐶Woof!", "Meow!🐶Woof!", 0, 2)); + suppliers.add(supplier("🐱Meow!🐶Woof!", 
"eow!🐶Woof!", 0, 3)); + + return parameterSuppliersFromTypedData(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new Locate(source, args.get(0), args.get(1), args.size() < 3 ? null : args.get(2)); } - private Integer process(String str, String substr, Integer start) { - try ( - EvalOperator.ExpressionEvaluator eval = evaluator( - new Locate( - Source.EMPTY, - field("str", DataTypes.KEYWORD), - field("substr", DataTypes.KEYWORD), - new Literal(Source.EMPTY, start, DataTypes.INTEGER) - ) - ).get(driverContext()); - Block block = eval.eval(row(List.of(new BytesRef(str), new BytesRef(substr)))) - ) { - return block.isNull(0) ? Integer.valueOf(0) : ((Integer) toJavaObject(block, 0)); + private static TestCaseSupplier supplier(String str, String substr, @Nullable Integer start, @Nullable Integer expectedValue) { + String name = String.format(Locale.ROOT, "\"%s\" in \"%s\"", substr, str); + if (start != null) { + name += " starting at " + start; } + + return new TestCaseSupplier( + name, + types(DataTypes.KEYWORD, DataTypes.KEYWORD, start != null), + () -> testCase(DataTypes.KEYWORD, DataTypes.KEYWORD, str, substr, start, expectedValue) + ); + } + + interface ExpectedValue { + int expectedValue(String str, String substr, Integer start); } private static TestCaseSupplier supplier( String name, - DataType firstType, - DataType secondType, + DataType strType, + DataType substrType, Supplier strValueSupplier, - Supplier substrValueSupplier, - Supplier startSupplier + Function substrValueSupplier, + @Nullable Supplier startSupplier, + ExpectedValue expectedValue ) { - return new TestCaseSupplier(name, List.of(firstType, secondType), () -> { - List values = new ArrayList<>(); - String expectedToString = "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; - - String value = strValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(value), firstType, "0")); + List types = types(strType, substrType, startSupplier != null); + return new TestCaseSupplier(name + TestCaseSupplier.nameFromTypes(types), types, () -> { + String str = strValueSupplier.get(); + String substr = substrValueSupplier.apply(str); + Integer start = startSupplier == null ? 
null : startSupplier.get(); + return testCase(strType, substrType, str, substr, start, expectedValue.expectedValue(str, substr, start)); + }); } - String substrValue = substrValueSupplier.get(); - values.add(new TestCaseSupplier.TypedData(new BytesRef(substrValue), secondType, "1")); + private static String expectedToString(boolean hasStart) { + if (hasStart) { + return "LocateEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1], start=Attribute[channel=2]]"; + } + return "LocateNoStartEvaluator[str=Attribute[channel=0], substr=Attribute[channel=1]]"; + } - Integer startValue = startSupplier.get(); - values.add(new TestCaseSupplier.TypedData(startValue, DataTypes.INTEGER, "2")); + private static List types(DataType firstType, DataType secondType, boolean hasStart) { + List types = new ArrayList<>(); + types.add(firstType); + types.add(secondType); + if (hasStart) { + types.add(DataTypes.INTEGER); + } + return types; + } - int expectedValue = 1 + value.indexOf(substrValue); - return new TestCaseSupplier.TestCase(values, expectedToString, DataTypes.INTEGER, equalTo(expectedValue)); - }); + private static TestCaseSupplier.TestCase testCase( + DataType strType, + DataType substrType, + String str, + String substr, + Integer start, + Integer expectedValue + ) { + List values = new ArrayList<>(); + values.add(new TestCaseSupplier.TypedData(str == null ? null : new BytesRef(str), strType, "str")); + values.add(new TestCaseSupplier.TypedData(substr == null ? null : new BytesRef(substr), substrType, "substr")); + if (start != null) { + values.add(new TestCaseSupplier.TypedData(start, DataTypes.INTEGER, "start")); + } + return new TestCaseSupplier.TestCase(values, expectedToString(start != null), DataTypes.INTEGER, equalTo(expectedValue)); + } } From 96227a1970c9da0ffbd05dcbea7fa61eb4ce1df9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Apr 2024 14:19:48 -0400 Subject: [PATCH 114/173] ESQL: Generate kibana inline docs (#106782) This takes a stab at generating the markdown files that Kibana uses for its inline help. It doesn't include all of the examples because the `@Example` annotation is not filled in - we're tracking that in https://github.com/elastic/elasticsearch/issues/104247#issuecomment-2018944371. There are some links in the output and they are in markdown syntax. We should figure out how to make them work for Kibana. 
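To make the shape of the generated output concrete, here is a minimal sketch of how a consumer such as Kibana's ESQL editor could read one of the new `kibana/definition` files. This is only an illustration under assumptions, not part of this change: it presumes Jackson is on the classpath, and the class name is hypothetical; the JSON field names (`name`, `description`, `signatures`, `params`, `returnType`) do match the files added below.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical consumer of one generated definition file; not part of this patch.
public class PrintEsqlFunctionDefinition {
    public static void main(String[] args) throws Exception {
        // One of the definition files added by this change.
        Path definition = Path.of("docs/reference/esql/functions/kibana/definition/abs.json");
        JsonNode root = new ObjectMapper().readTree(Files.readString(definition));
        // Top-level metadata: the function name and its one-line description.
        System.out.println(root.get("name").asText() + ": " + root.get("description").asText());
        // Each entry in "signatures" is one supported combination of parameter types.
        for (JsonNode signature : root.get("signatures")) {
            StringBuilder params = new StringBuilder();
            for (JsonNode param : signature.get("params")) {
                if (params.length() > 0) {
                    params.append(", ");
                }
                params.append(param.get("name").asText()).append(": ").append(param.get("type").asText());
            }
            System.out.println("  (" + params + ") -> " + signature.get("returnType").asText());
        }
    }
}
```

Run against the `abs.json` added below, this would print `abs: Returns the absolute value.` followed by one `(number: <type>) -> <type>` line per signature.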
--- docs/reference/esql/functions/README.md | 2 + .../esql/functions/kibana/definition/abs.json | 60 + .../functions/kibana/definition/acos.json | 59 + .../functions/kibana/definition/asin.json | 59 + .../functions/kibana/definition/atan.json | 59 + .../functions/kibana/definition/atan2.json | 299 +++++ .../kibana/definition/auto_bucket.json | 1088 +++++++++++++++++ .../functions/kibana/definition/case.json | 32 + .../functions/kibana/definition/ceil.json | 60 + .../functions/kibana/definition/coalesce.json | 161 +++ .../functions/kibana/definition/concat.json | 44 + .../esql/functions/kibana/definition/cos.json | 59 + .../functions/kibana/definition/cosh.json | 59 + .../kibana/definition/date_diff.json | 56 + .../kibana/definition/date_extract.json | 44 + .../kibana/definition/date_format.json | 44 + .../kibana/definition/date_parse.json | 62 + .../kibana/definition/date_trunc.json | 49 + .../esql/functions/kibana/definition/e.json | 12 + .../kibana/definition/ends_with.json | 44 + .../functions/kibana/definition/floor.json | 56 + .../functions/kibana/definition/greatest.json | 212 ++++ .../functions/kibana/definition/least.json | 212 ++++ .../functions/kibana/definition/left.json | 47 + .../functions/kibana/definition/length.json | 32 + .../functions/kibana/definition/locate.json | 176 +++ .../esql/functions/kibana/definition/log.json | 348 ++++++ .../functions/kibana/definition/log10.json | 59 + .../functions/kibana/definition/ltrim.json | 32 + .../functions/kibana/definition/mv_avg.json | 56 + .../kibana/definition/mv_concat.json | 80 ++ .../functions/kibana/definition/mv_count.json | 176 +++ .../kibana/definition/mv_dedupe.json | 116 ++ .../functions/kibana/definition/mv_first.json | 176 +++ .../functions/kibana/definition/mv_last.json | 176 +++ .../functions/kibana/definition/mv_max.json | 128 ++ .../kibana/definition/mv_median.json | 56 + .../functions/kibana/definition/mv_min.json | 128 ++ .../functions/kibana/definition/mv_slice.json | 320 +++++ .../functions/kibana/definition/mv_sort.json | 170 +++ .../functions/kibana/definition/mv_sum.json | 56 + .../functions/kibana/definition/mv_zip.json | 56 + .../esql/functions/kibana/definition/pi.json | 12 + .../esql/functions/kibana/definition/pow.json | 296 +++++ .../functions/kibana/definition/replace.json | 200 +++ .../functions/kibana/definition/right.json | 44 + .../functions/kibana/definition/round.json | 26 + .../functions/kibana/definition/rtrim.json | 32 + .../functions/kibana/definition/signum.json | 59 + .../esql/functions/kibana/definition/sin.json | 59 + .../functions/kibana/definition/sinh.json | 59 + .../functions/kibana/definition/split.json | 44 + .../functions/kibana/definition/sqrt.json | 56 + .../kibana/definition/st_contains.json | 155 +++ .../kibana/definition/st_disjoint.json | 155 +++ .../kibana/definition/st_intersects.json | 155 +++ .../kibana/definition/st_within.json | 155 +++ .../functions/kibana/definition/st_x.json | 32 + .../functions/kibana/definition/st_y.json | 32 + .../kibana/definition/starts_with.json | 44 + .../kibana/definition/substring.json | 56 + .../esql/functions/kibana/definition/tan.json | 59 + .../functions/kibana/definition/tanh.json | 59 + .../esql/functions/kibana/definition/tau.json | 12 + .../kibana/definition/to_boolean.json | 92 ++ .../kibana/definition/to_cartesianpoint.json | 44 + .../kibana/definition/to_cartesianshape.json | 56 + .../kibana/definition/to_datetime.json | 92 ++ .../kibana/definition/to_degrees.json | 56 + .../kibana/definition/to_double.json | 104 ++ 
.../kibana/definition/to_geopoint.json | 44 + .../kibana/definition/to_geoshape.json | 56 + .../kibana/definition/to_integer.json | 104 ++ .../functions/kibana/definition/to_ip.json | 44 + .../functions/kibana/definition/to_long.json | 104 ++ .../functions/kibana/definition/to_lower.json | 32 + .../kibana/definition/to_radians.json | 56 + .../kibana/definition/to_string.json | 176 +++ .../kibana/definition/to_unsigned_long.json | 104 ++ .../functions/kibana/definition/to_upper.json | 32 + .../kibana/definition/to_version.json | 44 + .../functions/kibana/definition/trim.json | 32 + .../esql/functions/kibana/docs/abs.md | 11 + .../esql/functions/kibana/docs/acos.md | 11 + .../esql/functions/kibana/docs/asin.md | 12 + .../esql/functions/kibana/docs/atan.md | 12 + .../esql/functions/kibana/docs/atan2.md | 12 + .../esql/functions/kibana/docs/auto_bucket.md | 8 + .../esql/functions/kibana/docs/case.md | 8 + .../esql/functions/kibana/docs/ceil.md | 12 + .../esql/functions/kibana/docs/coalesce.md | 11 + .../esql/functions/kibana/docs/concat.md | 7 + .../esql/functions/kibana/docs/cos.md | 11 + .../esql/functions/kibana/docs/cosh.md | 11 + .../esql/functions/kibana/docs/date_diff.md | 7 + .../functions/kibana/docs/date_extract.md | 7 + .../esql/functions/kibana/docs/date_format.md | 7 + .../esql/functions/kibana/docs/date_parse.md | 7 + .../esql/functions/kibana/docs/date_trunc.md | 12 + .../reference/esql/functions/kibana/docs/e.md | 7 + .../esql/functions/kibana/docs/ends_with.md | 7 + .../esql/functions/kibana/docs/floor.md | 7 + .../esql/functions/kibana/docs/greatest.md | 7 + .../esql/functions/kibana/docs/least.md | 7 + .../esql/functions/kibana/docs/left.md | 14 + .../esql/functions/kibana/docs/length.md | 7 + .../esql/functions/kibana/docs/locate.md | 7 + .../esql/functions/kibana/docs/log.md | 13 + .../esql/functions/kibana/docs/log10.md | 13 + .../esql/functions/kibana/docs/ltrim.md | 7 + .../esql/functions/kibana/docs/mv_avg.md | 7 + .../esql/functions/kibana/docs/mv_concat.md | 7 + .../esql/functions/kibana/docs/mv_count.md | 7 + .../esql/functions/kibana/docs/mv_dedupe.md | 7 + .../esql/functions/kibana/docs/mv_first.md | 7 + .../esql/functions/kibana/docs/mv_last.md | 7 + .../esql/functions/kibana/docs/mv_max.md | 7 + .../esql/functions/kibana/docs/mv_median.md | 7 + .../esql/functions/kibana/docs/mv_min.md | 7 + .../esql/functions/kibana/docs/mv_slice.md | 7 + .../esql/functions/kibana/docs/mv_sort.md | 7 + .../esql/functions/kibana/docs/mv_sum.md | 7 + .../esql/functions/kibana/docs/mv_zip.md | 7 + .../esql/functions/kibana/docs/pi.md | 7 + .../esql/functions/kibana/docs/pow.md | 7 + .../esql/functions/kibana/docs/replace.md | 7 + .../esql/functions/kibana/docs/right.md | 7 + .../esql/functions/kibana/docs/round.md | 7 + .../esql/functions/kibana/docs/rtrim.md | 7 + .../esql/functions/kibana/docs/signum.md | 12 + .../esql/functions/kibana/docs/sin.md | 11 + .../esql/functions/kibana/docs/sinh.md | 11 + .../esql/functions/kibana/docs/split.md | 7 + .../esql/functions/kibana/docs/sqrt.md | 7 + .../esql/functions/kibana/docs/st_contains.md | 12 + .../esql/functions/kibana/docs/st_disjoint.md | 12 + .../functions/kibana/docs/st_intersects.md | 11 + .../esql/functions/kibana/docs/st_within.md | 12 + .../esql/functions/kibana/docs/st_x.md | 7 + .../esql/functions/kibana/docs/st_y.md | 7 + .../esql/functions/kibana/docs/starts_with.md | 7 + .../esql/functions/kibana/docs/substring.md | 7 + .../esql/functions/kibana/docs/tan.md | 11 + .../esql/functions/kibana/docs/tanh.md | 11 + 
.../esql/functions/kibana/docs/tau.md | 7 + .../esql/functions/kibana/docs/to_boolean.md | 7 + .../kibana/docs/to_cartesianpoint.md | 7 + .../kibana/docs/to_cartesianshape.md | 7 + .../esql/functions/kibana/docs/to_datetime.md | 7 + .../esql/functions/kibana/docs/to_degrees.md | 7 + .../esql/functions/kibana/docs/to_double.md | 7 + .../esql/functions/kibana/docs/to_geopoint.md | 7 + .../esql/functions/kibana/docs/to_geoshape.md | 7 + .../esql/functions/kibana/docs/to_integer.md | 7 + .../esql/functions/kibana/docs/to_ip.md | 7 + .../esql/functions/kibana/docs/to_long.md | 7 + .../esql/functions/kibana/docs/to_lower.md | 7 + .../esql/functions/kibana/docs/to_radians.md | 7 + .../esql/functions/kibana/docs/to_string.md | 7 + .../functions/kibana/docs/to_unsigned_long.md | 7 + .../esql/functions/kibana/docs/to_upper.md | 7 + .../esql/functions/kibana/docs/to_version.md | 7 + .../esql/functions/kibana/docs/trim.md | 7 + .../esql/functions/signature/case.svg | 1 + x-pack/plugin/esql/build.gradle | 33 +- .../function/EsqlFunctionRegistry.java | 17 +- .../function/AbstractFunctionTestCase.java | 248 +++- 167 files changed, 9196 insertions(+), 69 deletions(-) create mode 100644 docs/reference/esql/functions/kibana/definition/abs.json create mode 100644 docs/reference/esql/functions/kibana/definition/acos.json create mode 100644 docs/reference/esql/functions/kibana/definition/asin.json create mode 100644 docs/reference/esql/functions/kibana/definition/atan.json create mode 100644 docs/reference/esql/functions/kibana/definition/atan2.json create mode 100644 docs/reference/esql/functions/kibana/definition/auto_bucket.json create mode 100644 docs/reference/esql/functions/kibana/definition/case.json create mode 100644 docs/reference/esql/functions/kibana/definition/ceil.json create mode 100644 docs/reference/esql/functions/kibana/definition/coalesce.json create mode 100644 docs/reference/esql/functions/kibana/definition/concat.json create mode 100644 docs/reference/esql/functions/kibana/definition/cos.json create mode 100644 docs/reference/esql/functions/kibana/definition/cosh.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_diff.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_extract.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_format.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_parse.json create mode 100644 docs/reference/esql/functions/kibana/definition/date_trunc.json create mode 100644 docs/reference/esql/functions/kibana/definition/e.json create mode 100644 docs/reference/esql/functions/kibana/definition/ends_with.json create mode 100644 docs/reference/esql/functions/kibana/definition/floor.json create mode 100644 docs/reference/esql/functions/kibana/definition/greatest.json create mode 100644 docs/reference/esql/functions/kibana/definition/least.json create mode 100644 docs/reference/esql/functions/kibana/definition/left.json create mode 100644 docs/reference/esql/functions/kibana/definition/length.json create mode 100644 docs/reference/esql/functions/kibana/definition/locate.json create mode 100644 docs/reference/esql/functions/kibana/definition/log.json create mode 100644 docs/reference/esql/functions/kibana/definition/log10.json create mode 100644 docs/reference/esql/functions/kibana/definition/ltrim.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_avg.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_concat.json create 
mode 100644 docs/reference/esql/functions/kibana/definition/mv_count.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_dedupe.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_first.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_last.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_max.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_median.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_min.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_slice.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_sort.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_sum.json create mode 100644 docs/reference/esql/functions/kibana/definition/mv_zip.json create mode 100644 docs/reference/esql/functions/kibana/definition/pi.json create mode 100644 docs/reference/esql/functions/kibana/definition/pow.json create mode 100644 docs/reference/esql/functions/kibana/definition/replace.json create mode 100644 docs/reference/esql/functions/kibana/definition/right.json create mode 100644 docs/reference/esql/functions/kibana/definition/round.json create mode 100644 docs/reference/esql/functions/kibana/definition/rtrim.json create mode 100644 docs/reference/esql/functions/kibana/definition/signum.json create mode 100644 docs/reference/esql/functions/kibana/definition/sin.json create mode 100644 docs/reference/esql/functions/kibana/definition/sinh.json create mode 100644 docs/reference/esql/functions/kibana/definition/split.json create mode 100644 docs/reference/esql/functions/kibana/definition/sqrt.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_contains.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_disjoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_intersects.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_within.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_x.json create mode 100644 docs/reference/esql/functions/kibana/definition/st_y.json create mode 100644 docs/reference/esql/functions/kibana/definition/starts_with.json create mode 100644 docs/reference/esql/functions/kibana/definition/substring.json create mode 100644 docs/reference/esql/functions/kibana/definition/tan.json create mode 100644 docs/reference/esql/functions/kibana/definition/tanh.json create mode 100644 docs/reference/esql/functions/kibana/definition/tau.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_boolean.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_cartesianshape.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_datetime.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_degrees.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_double.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_geopoint.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_geoshape.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_integer.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_ip.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_long.json create mode 100644 
docs/reference/esql/functions/kibana/definition/to_lower.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_radians.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_string.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_unsigned_long.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_upper.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_version.json create mode 100644 docs/reference/esql/functions/kibana/definition/trim.json create mode 100644 docs/reference/esql/functions/kibana/docs/abs.md create mode 100644 docs/reference/esql/functions/kibana/docs/acos.md create mode 100644 docs/reference/esql/functions/kibana/docs/asin.md create mode 100644 docs/reference/esql/functions/kibana/docs/atan.md create mode 100644 docs/reference/esql/functions/kibana/docs/atan2.md create mode 100644 docs/reference/esql/functions/kibana/docs/auto_bucket.md create mode 100644 docs/reference/esql/functions/kibana/docs/case.md create mode 100644 docs/reference/esql/functions/kibana/docs/ceil.md create mode 100644 docs/reference/esql/functions/kibana/docs/coalesce.md create mode 100644 docs/reference/esql/functions/kibana/docs/concat.md create mode 100644 docs/reference/esql/functions/kibana/docs/cos.md create mode 100644 docs/reference/esql/functions/kibana/docs/cosh.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_diff.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_extract.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_format.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_parse.md create mode 100644 docs/reference/esql/functions/kibana/docs/date_trunc.md create mode 100644 docs/reference/esql/functions/kibana/docs/e.md create mode 100644 docs/reference/esql/functions/kibana/docs/ends_with.md create mode 100644 docs/reference/esql/functions/kibana/docs/floor.md create mode 100644 docs/reference/esql/functions/kibana/docs/greatest.md create mode 100644 docs/reference/esql/functions/kibana/docs/least.md create mode 100644 docs/reference/esql/functions/kibana/docs/left.md create mode 100644 docs/reference/esql/functions/kibana/docs/length.md create mode 100644 docs/reference/esql/functions/kibana/docs/locate.md create mode 100644 docs/reference/esql/functions/kibana/docs/log.md create mode 100644 docs/reference/esql/functions/kibana/docs/log10.md create mode 100644 docs/reference/esql/functions/kibana/docs/ltrim.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_avg.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_concat.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_count.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_dedupe.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_first.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_last.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_max.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_median.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_min.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_slice.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_sort.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_sum.md create mode 100644 docs/reference/esql/functions/kibana/docs/mv_zip.md create mode 100644 docs/reference/esql/functions/kibana/docs/pi.md 
create mode 100644 docs/reference/esql/functions/kibana/docs/pow.md create mode 100644 docs/reference/esql/functions/kibana/docs/replace.md create mode 100644 docs/reference/esql/functions/kibana/docs/right.md create mode 100644 docs/reference/esql/functions/kibana/docs/round.md create mode 100644 docs/reference/esql/functions/kibana/docs/rtrim.md create mode 100644 docs/reference/esql/functions/kibana/docs/signum.md create mode 100644 docs/reference/esql/functions/kibana/docs/sin.md create mode 100644 docs/reference/esql/functions/kibana/docs/sinh.md create mode 100644 docs/reference/esql/functions/kibana/docs/split.md create mode 100644 docs/reference/esql/functions/kibana/docs/sqrt.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_contains.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_disjoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_intersects.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_within.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_x.md create mode 100644 docs/reference/esql/functions/kibana/docs/st_y.md create mode 100644 docs/reference/esql/functions/kibana/docs/starts_with.md create mode 100644 docs/reference/esql/functions/kibana/docs/substring.md create mode 100644 docs/reference/esql/functions/kibana/docs/tan.md create mode 100644 docs/reference/esql/functions/kibana/docs/tanh.md create mode 100644 docs/reference/esql/functions/kibana/docs/tau.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_boolean.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_cartesianshape.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_datetime.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_degrees.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_double.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_geopoint.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_geoshape.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_integer.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_ip.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_long.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_lower.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_radians.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_string.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_unsigned_long.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_upper.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_version.md create mode 100644 docs/reference/esql/functions/kibana/docs/trim.md create mode 100644 docs/reference/esql/functions/signature/case.svg diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index 7be4c70fbe6b0..35b852ba060f1 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -5,6 +5,8 @@ The files in these subdirectories are generated by ESQL's test suite: * `signature` - railroad diagram of the syntax to invoke each function * `types` - a table of each combination of support type for each parameter. These are generated from tests. 
* `layout` - a fully generated description for each function +* `kibana/definition` - function definitions for kibana's ESQL editor +* `kibana/docs` - the inline docs for kibana Most functions can use the generated docs generated in the `layout` directory. If we need something more custom for the function we can make a file in this diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json new file mode 100644 index 0000000000000..82c3c205d7512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "abs", + "description" : "Returns the absolute value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW number = -1.0 \n| EVAL abs_number = ABS(number)", + "FROM employees\n| KEEP first_name, last_name, height\n| EVAL abs_height = ABS(0.0 - height)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json new file mode 100644 index 0000000000000..6a6ab59278639 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "acos", + "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL acos=ACOS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json new file mode 100644 index 0000000000000..f5ebb817fff33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "asin", + "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL asin=ASIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json new file mode 100644 index 0000000000000..654a48b8ca76d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan", + "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=12.9\n| EVAL atan=ATAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json new file mode 100644 index 0000000000000..63940831241f7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -0,0 +1,299 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan2", + "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW y=12.9, x=.6\n| EVAL atan2=ATAN2(y, x)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/auto_bucket.json b/docs/reference/esql/functions/kibana/definition/auto_bucket.json new file mode 100644 index 0000000000000..96940e5f051f2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/auto_bucket.json @@ -0,0 +1,1088 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "auto_bucket", + "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", 
+ "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + 
"params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : 
"double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + 
"returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json new file mode 100644 index 0000000000000..73bc215ac6ade --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "case", + "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "signatures" : [ + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "trueValue", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "falseValue", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json new file mode 100644 index 0000000000000..b8ac9ad55f31a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ceil", + "description" : "Round a number up to the nearest integer.", + "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW a=1.8\n| EVAL a=CEIL(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json new file mode 100644 index 0000000000000..87feead06d091 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -0,0 +1,161 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "coalesce", + "description" : "Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + } + ], + "examples" : [ + "ROW a=null, b=\"b\"\n| EVAL COALESCE(a, b)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json new file mode 100644 index 0000000000000..bb1b84f67aff9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "concat", + "description" : "Concatenates two or more strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json new file mode 100644 index 0000000000000..c7757fbd4071d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cos", + "description" : "Returns the cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cos=COS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json new file mode 100644 index 0000000000000..a34eee15be37e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cosh", + "description" : "Returns the hyperbolic cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cosh=COSH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json new file mode 100644 index 0000000000000..aa030ea163709 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_diff", + "description" : "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument", + "signatures" : [ + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json new file mode 100644 index 0000000000000..c5edf5ac14109 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_extract", + "description" : "Extracts parts of a date, like year, month, day, hour.", + "signatures" : [ + { + "params" : [ + { + "name" : "datePart", + "type" : "keyword", + "optional" : false, + "description" : "Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "datePart", + "type" : "text", + "optional" : false, + "description" : "Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json new file mode 100644 index 0000000000000..8807e5d330f84 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_format", + "description" : "Returns a string representation of a date, in the provided format.", + "signatures" : [ + { + "params" : [ + { + "name" : "dateFormat", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json new file mode 100644 index 0000000000000..85bce19532020 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -0,0 +1,62 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_parse", + "description" : "Parses a string into a date value", + "signatures" : [ + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "keyword", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json new file mode 100644 index 0000000000000..3d8658c496529 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_trunc", + "description" : "Rounds down a date to the closest interval.", + "signatures" : [ + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ], + "examples" : [ + "FROM employees\n| KEEP first_name, last_name, hire_date\n| EVAL year_hired = DATE_TRUNC(1 year, hire_date)", + "FROM employees\n| EVAL year = DATE_TRUNC(1 year, hire_date)\n| STATS hires = COUNT(emp_no) BY year\n| SORT year", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json new file mode 100644 index 0000000000000..97d33b752d042 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "e", + "description" : "Euler’s number.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json new file mode 100644 index 0000000000000..66f4c7404905c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ends_with", + "description" : "Returns a boolean that indicates whether a keyword string ends with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json new file mode 100644 index 0000000000000..18ab8031558bd --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "floor", + "description" : "Round a number down to the nearest integer.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json new file mode 100644 index 0000000000000..f72f54708c6b1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "greatest", + "description" : "Returns the maximum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json new file mode 100644 index 0000000000000..66efedc0c9fe5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "least", + "description" : "Returns the minimum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json new file mode 100644 index 0000000000000..bcda92b887bb0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "left", + "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "FROM employees\n| KEEP last_name\n| EVAL left = LEFT(last_name, 3)\n| SORT last_name ASC\n| LIMIT 5" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json new file mode 100644 index 0000000000000..a42656b71d471 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "length", + "description" : "Returns the character length of a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json new file mode 100644 index 0000000000000..9629b81820f8a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "locate", + "description" : "Returns an integer that indicates the position of a keyword substring within another string", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json new file mode 100644 index 0000000000000..0edafefc4dd1a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -0,0 +1,348 @@ +{ + "comment" : "This is generated 
by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log", + "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
+ }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW base = 2.0, value = 8.0\n| EVAL s = LOG(base, value)", + "row value = 100\n| EVAL s = LOG(value);" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json new file mode 100644 index 0000000000000..ca506b0df33e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log10", + "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
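[Editorial aside, not part of the generated JSON: as the signatures above indicate, LOG's `base` argument is optional and every overload returns a double. A minimal hedged sketch of both arities, using illustrative literal values in a ROW source:

    ROW base = 2.0, value = 8.0
    | EVAL two_arg = LOG(base, value), natural = LOG(value)

Here `two_arg` is log base 2 of 8 (3.0) and `natural` is the natural logarithm of 8, matching the "If not provided, this function returns the natural logarithm (base e)" behavior described above.]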
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 1000.0 \n| EVAL s = LOG10(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json new file mode 100644 index 0000000000000..bcf51f6b9e9fb --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ltrim", + "description" : "Removes leading whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json new file mode 100644 index 0000000000000..2fa14f0c91d51 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_avg", + "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json new file mode 100644 index 0000000000000..1f6936857bcff --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json @@ -0,0 +1,80 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_concat", + "description" : "Reduce a multivalued string field to a single valued field by concatenating all values.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json new file mode 100644 index 0000000000000..d27821451899b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_count", + "description" : "Reduce a multivalued field to a single valued field containing the count of values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : 
"field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json new file mode 100644 index 0000000000000..c0f02d9febc42 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -0,0 +1,116 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_dedupe", + "description" : "Remove duplicate values from a multivalued field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json new file mode 100644 index 0000000000000..d73b3ae002be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_first", + "description" : "Reduce a multivalued field to a single valued field containing the first value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json new file mode 100644 index 0000000000000..0484bfa0b488b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_last", + "description" : "Reduce a multivalued field to a single valued field containing the last value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json new file mode 100644 index 0000000000000..62a6e15f3346a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_max", + "description" : "Reduce a multivalued field to a single valued field containing the maximum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json b/docs/reference/esql/functions/kibana/definition/mv_median.json new file mode 100644 index 0000000000000..a6d79f7e6f0a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_median.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_median", + "description" : "Converts a multivalued field into a single valued field containing the median value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json new file mode 100644 index 0000000000000..8a6f485aedc57 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_min", + "description" : "Reduce a multivalued field to a single valued field containing the minimum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json new file mode 100644 index 0000000000000..6d3aa873d8d01 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -0,0 +1,320 @@ +{ + "comment" : "This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_slice", + "description" : "Returns a subset of the multivalued field using the start and end index values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ 
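[Editorial aside, not part of the generated JSON: the mv_* reduction definitions above (mv_count, mv_dedupe, mv_first, mv_last, mv_max, mv_median, mv_min) all take a single multivalued field, and none of them ships an `examples` entry in this patch. A hedged sketch exercising several of them against an illustrative multivalued literal:

    ROW a = [3, 5, 1, 6]
    | EVAL count = MV_COUNT(a), first = MV_FIRST(a), last = MV_LAST(a), max = MV_MAX(a), min = MV_MIN(a)

The numeric values in `a` are invented for illustration; each EVAL column reduces the multivalued field to a single value as its definition describes.]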
+ { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json new file mode 100644 index 0000000000000..f647d51a2cfaf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -0,0 +1,170 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sort", + "description" : "Sorts a multivalued field in lexicographical order.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json new file mode 100644 index 0000000000000..25f687efed675 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sum", + "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json new file mode 100644 index 0000000000000..7fabc0e56f12d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_zip", + "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json new file mode 100644 index 0000000000000..d1d700d2011ee --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pi.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pi", + "description" : "The ratio of a circle’s circumference to its diameter.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json new file mode 100644 index 0000000000000..9970a45847cc7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pow.json @@ -0,0 +1,296 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pow", + "description" : "Returns the value of a base raised to the power of an exponent.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "double", + "optional" : 
false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json new file mode 100644 index 0000000000000..cf54b296555a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/replace.json @@ -0,0 +1,200 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "replace", + "description" : "The function substitutes in the string any match of the regular expression with the replacement string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : 
"newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json new file mode 100644 index 0000000000000..58d081c3782bf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/right.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "right", + "description" : "Return the substring that extracts length chars from the string starting from the right.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json new file mode 100644 index 0000000000000..e12672d8ee6e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -0,0 +1,26 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "round", + "description" : "Rounds a number to the closest number with the specified number of digits.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round" + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0." + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json new file mode 100644 index 0000000000000..586d53a3f84da --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/rtrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "rtrim", + "description" : "Removes trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json new file mode 100644 index 0000000000000..b8343283f457e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/signum.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "signum", + "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 100.0\n| EVAL s = SIGNUM(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json new file mode 100644 index 0000000000000..8d092bd0c15a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sin", + "description" : "Returns ths Sine trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
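[Editorial aside, not part of the generated JSON: the replace and right definitions earlier in this hunk leave their parameter descriptions empty. A brief hedged sketch with invented literal values, consistent with the signatures shown (REPLACE takes a string, a regex, and a replacement; RIGHT takes a string and a length):

    ROW s = "Hello World"
    | EVAL replaced = REPLACE(s, "World", "Universe"), suffix = RIGHT(s, 5)

`replaced` becomes "Hello Universe" and `suffix` the last five characters, "World".]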
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sin=SIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json new file mode 100644 index 0000000000000..2261b18134f6c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sinh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sinh", + "description" : "Returns the hyperbolic sine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sinh=SINH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json new file mode 100644 index 0000000000000..b64def1b813fc --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/split.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "split", + "description" : "Split a single valued string into multiple strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json new file mode 100644 index 0000000000000..6036fcfd113f3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sqrt.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sqrt", + "description" : "Returns the square root of a number.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json new file mode 100644 index 0000000000000..f4f8003917908 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_contains", + "description" : "Returns whether the first geometry contains the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, 
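[Editorial aside, not part of the generated JSON: the split and sqrt definitions above likewise carry empty descriptions and no examples. A minimal hedged sketch with illustrative literals:

    ROW words = "foo;bar;baz", d = 100.0
    | EVAL word = SPLIT(words, ";"), s = SQRT(d)

SPLIT turns the single-valued string into a multivalued field of three values, and SQRT returns 10.0 as a double.]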
+ "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE(\"POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json new file mode 100644 index 0000000000000..98647b63ff18f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_disjoint", + "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + 
"returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE(\"POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json new file mode 100644 index 0000000000000..ba619fe57ecf5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_intersects", + "description" : "Returns whether the two geometries or geometry columns intersect.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + 
"description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airports\n| WHERE ST_INTERSECTS(location, TO_GEOSHAPE(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\"))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json new file mode 100644 index 0000000000000..ee98337441ab7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_within", + "description" : "Returns whether the first geometry is within the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE(\"POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json new 
file mode 100644 index 0000000000000..57598b3470e11 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_x", + "description" : "Extracts the x-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json new file mode 100644 index 0000000000000..0dacaa56bb8de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_y", + "description" : "Extracts the y-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json new file mode 100644 index 0000000000000..918940d110651 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/starts_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "starts_with", + "description" : "Returns a boolean that indicates whether a keyword string starts with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json new file mode 100644 index 0000000000000..89c62258f4516 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/substring.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "substring", + "description" : "Returns a substring of a string, specified by a start position and an optional length", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json new file mode 100644 index 0000000000000..7498964dc1a2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tan", + "description" : "Returns the Tangent trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tan=TAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json new file mode 100644 index 0000000000000..507f62d394be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tanh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tanh", + "description" : "Returns the Tangent hyperbolic function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tanh=TANH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json new file mode 100644 index 0000000000000..6ad20f86be4de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tau.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tau", + "description" : "The ratio of a circle’s circumference to its radius.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json new file mode 100644 index 0000000000000..314df3f7a4ca9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_boolean", + "description" : "Converts an input value to a boolean value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json new file mode 100644 index 0000000000000..59b0c0b38f850 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianpoint", + "description" : "Converts an input value to a point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json new file mode 100644 index 0000000000000..75c1f05bd7738 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianshape", + "description" : "Converts an input value to a shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json new file mode 100644 index 0000000000000..e2b10e54f4a29 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_datetime", + "description" : "Converts an input value to a date value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json new file mode 100644 index 0000000000000..7652254fcebe1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_degrees", + "description" : "Converts a number in radians to degrees.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json new file mode 100644 index 0000000000000..7fad85d7be129 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_double.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_double", + "description" : "Converts an input value to a double value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json new file mode 100644 index 0000000000000..b8a7ca9b9a19f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geopoint", + "description" : "Converts an input value to a geo_point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json new file mode 100644 index 0000000000000..d3dee5812510c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geoshape", + "description" : "Converts an input value to a geo_shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json new file mode 100644 index 0000000000000..3e8a7897bda7b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_integer.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_integer", + "description" : "Converts an input value to an integer value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json new file mode 100644 index 0000000000000..f99ef65752559 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_ip", + "description" : "Converts an input string to an IP value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json new file mode 100644 index 0000000000000..56fd5dc83e721 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_long", + "description" : "Converts an input value to a long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json new file mode 100644 index 0000000000000..4b3121da437ed --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_lower", + "description" : "Returns a new string representing the input string converted to lower case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json new file mode 100644 index 0000000000000..8b8fc287318ab --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_radians", + "description" : "Converts a number in degrees to radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json new file mode 100644 index 0000000000000..bb77c68bf59e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_string", + "description" : "Converts a field into a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json new file mode 100644 index 0000000000000..923294c19ffba --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_unsigned_long", + "description" : "Converts an input value to an unsigned long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json new file mode 100644 index 0000000000000..d5ecb1f47206f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_upper", + "description" : "Returns a new string representing the input string converted to upper case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json new file mode 100644 index 0000000000000..6076f8dfd70c0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_version", + "description" : "Converts an input string to a version value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json new file mode 100644 index 0000000000000..8e194df0eb84d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "trim", + "description" : "Removes leading and trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/abs.md b/docs/reference/esql/functions/kibana/docs/abs.md new file mode 100644 index 0000000000000..9dc2c5c76f4f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/abs.md @@ -0,0 +1,11 @@ + + +### ABS +Returns the absolute value. + +``` +ROW number = -1.0 +| EVAL abs_number = ABS(number) +``` diff --git a/docs/reference/esql/functions/kibana/docs/acos.md b/docs/reference/esql/functions/kibana/docs/acos.md new file mode 100644 index 0000000000000..19ae2522d48b4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/acos.md @@ -0,0 +1,11 @@ + + +### ACOS +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL acos=ACOS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/asin.md b/docs/reference/esql/functions/kibana/docs/asin.md new file mode 100644 index 0000000000000..c072ac19b5b92 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/asin.md @@ -0,0 +1,12 @@ + + +### ASIN +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +numeric expression as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL asin=ASIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan.md b/docs/reference/esql/functions/kibana/docs/atan.md new file mode 100644 index 0000000000000..62686f2fbab2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan.md @@ -0,0 +1,12 @@ + + +### ATAN +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +numeric expression as an angle, expressed in radians. 
+ +``` +ROW a=12.9 +| EVAL atan=ATAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan2.md b/docs/reference/esql/functions/kibana/docs/atan2.md new file mode 100644 index 0000000000000..0000c532236d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan2.md @@ -0,0 +1,12 @@ + + +### ATAN2 +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +origin to the point (x , y) in the Cartesian plane, expressed in radians. + +``` +ROW y=12.9, x=.6 +| EVAL atan2=ATAN2(y, x) +``` diff --git a/docs/reference/esql/functions/kibana/docs/auto_bucket.md b/docs/reference/esql/functions/kibana/docs/auto_bucket.md new file mode 100644 index 0000000000000..df3999f968486 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/auto_bucket.md @@ -0,0 +1,8 @@ + + +### AUTO_BUCKET +Creates human-friendly buckets and returns a datetime value +for each row that corresponds to the resulting bucket the row falls into. + diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md new file mode 100644 index 0000000000000..e1494a5c2af8c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -0,0 +1,8 @@ + + +### CASE +Accepts pairs of conditions and values. +The function returns the value that belongs to the first condition that evaluates to true. + diff --git a/docs/reference/esql/functions/kibana/docs/ceil.md b/docs/reference/esql/functions/kibana/docs/ceil.md new file mode 100644 index 0000000000000..812b139206c35 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ceil.md @@ -0,0 +1,12 @@ + + +### CEIL +Round a number up to the nearest integer. + +``` +ROW a=1.8 +| EVAL a=CEIL(a) +``` +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/kibana/docs/coalesce.md b/docs/reference/esql/functions/kibana/docs/coalesce.md new file mode 100644 index 0000000000000..89cca3f3a286a --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/coalesce.md @@ -0,0 +1,11 @@ + + +### COALESCE +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. + +``` +ROW a=null, b="b" +| EVAL COALESCE(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/concat.md b/docs/reference/esql/functions/kibana/docs/concat.md new file mode 100644 index 0000000000000..9c30d978370dc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/concat.md @@ -0,0 +1,7 @@ + + +### CONCAT +Concatenates two or more strings. + diff --git a/docs/reference/esql/functions/kibana/docs/cos.md b/docs/reference/esql/functions/kibana/docs/cos.md new file mode 100644 index 0000000000000..9e8abebaddb89 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cos.md @@ -0,0 +1,11 @@ + + +### COS +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. + +``` +ROW a=1.8 +| EVAL cos=COS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/cosh.md b/docs/reference/esql/functions/kibana/docs/cosh.md new file mode 100644 index 0000000000000..b8fae70ae2eed --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cosh.md @@ -0,0 +1,11 @@ + + +### COSH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. 
+ +``` +ROW a=1.8 +| EVAL cosh=COSH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/date_diff.md b/docs/reference/esql/functions/kibana/docs/date_diff.md new file mode 100644 index 0000000000000..8d33e21d2f92c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_diff.md @@ -0,0 +1,7 @@ + + +### DATE_DIFF +Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument + diff --git a/docs/reference/esql/functions/kibana/docs/date_extract.md b/docs/reference/esql/functions/kibana/docs/date_extract.md new file mode 100644 index 0000000000000..49eb2391c188e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_extract.md @@ -0,0 +1,7 @@ + + +### DATE_EXTRACT +Extracts parts of a date, like year, month, day, hour. + diff --git a/docs/reference/esql/functions/kibana/docs/date_format.md b/docs/reference/esql/functions/kibana/docs/date_format.md new file mode 100644 index 0000000000000..fbf7fcbf0cb48 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_format.md @@ -0,0 +1,7 @@ + + +### DATE_FORMAT +Returns a string representation of a date, in the provided format. + diff --git a/docs/reference/esql/functions/kibana/docs/date_parse.md b/docs/reference/esql/functions/kibana/docs/date_parse.md new file mode 100644 index 0000000000000..8cf0769c38f3b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_parse.md @@ -0,0 +1,7 @@ + + +### DATE_PARSE +Parses a string into a date value + diff --git a/docs/reference/esql/functions/kibana/docs/date_trunc.md b/docs/reference/esql/functions/kibana/docs/date_trunc.md new file mode 100644 index 0000000000000..6aa81ebbac3c3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_trunc.md @@ -0,0 +1,12 @@ + + +### DATE_TRUNC +Rounds down a date to the closest interval. + +``` +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL year_hired = DATE_TRUNC(1 year, hire_date) +``` diff --git a/docs/reference/esql/functions/kibana/docs/e.md b/docs/reference/esql/functions/kibana/docs/e.md new file mode 100644 index 0000000000000..da85eadf2e74e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/e.md @@ -0,0 +1,7 @@ + + +### E +Euler’s number. + diff --git a/docs/reference/esql/functions/kibana/docs/ends_with.md b/docs/reference/esql/functions/kibana/docs/ends_with.md new file mode 100644 index 0000000000000..74f02c732edef --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ends_with.md @@ -0,0 +1,7 @@ + + +### ENDS_WITH +Returns a boolean that indicates whether a keyword string ends with another string + diff --git a/docs/reference/esql/functions/kibana/docs/floor.md b/docs/reference/esql/functions/kibana/docs/floor.md new file mode 100644 index 0000000000000..a0a095525e08d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/floor.md @@ -0,0 +1,7 @@ + + +### FLOOR +Round a number down to the nearest integer. + diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md new file mode 100644 index 0000000000000..3db0c9ed87aa5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -0,0 +1,7 @@ + + +### GREATEST +Returns the maximum value from many columns. 
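The generated greatest.md above stops at its one-line description with no usage snippet, unlike most of the other docs files in this patch. As an illustrative sketch only (the columns `a` and `b` are hypothetical, not part of this patch), a GREATEST call in ES|QL could look like:

```
ROW a = 10, b = 20
| EVAL g = GREATEST(a, b)
```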
+ diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md new file mode 100644 index 0000000000000..ff2c19592c8e1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -0,0 +1,7 @@ + + +### LEAST +Returns the minimum value from many columns. + diff --git a/docs/reference/esql/functions/kibana/docs/left.md b/docs/reference/esql/functions/kibana/docs/left.md new file mode 100644 index 0000000000000..73b79f7976512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/left.md @@ -0,0 +1,14 @@ + + +### LEFT +Returns the substring that extracts 'length' chars from 'string' starting from the left. + +``` +FROM employees +| KEEP last_name +| EVAL left = LEFT(last_name, 3) +| SORT last_name ASC +| LIMIT 5 +``` diff --git a/docs/reference/esql/functions/kibana/docs/length.md b/docs/reference/esql/functions/kibana/docs/length.md new file mode 100644 index 0000000000000..bb1cefd390c71 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/length.md @@ -0,0 +1,7 @@ + + +### LENGTH +Returns the character length of a string. + diff --git a/docs/reference/esql/functions/kibana/docs/locate.md b/docs/reference/esql/functions/kibana/docs/locate.md new file mode 100644 index 0000000000000..0b4d4c625c17e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/locate.md @@ -0,0 +1,7 @@ + + +### LOCATE +Returns an integer that indicates the position of a keyword substring within another string + diff --git a/docs/reference/esql/functions/kibana/docs/log.md b/docs/reference/esql/functions/kibana/docs/log.md new file mode 100644 index 0000000000000..7ac136d31f720 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log.md @@ -0,0 +1,13 @@ + + +### LOG +Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. + +Logs of zero, negative numbers, and base of one return `null` as well as a warning. + +``` +ROW base = 2.0, value = 8.0 +| EVAL s = LOG(base, value) +``` diff --git a/docs/reference/esql/functions/kibana/docs/log10.md b/docs/reference/esql/functions/kibana/docs/log10.md new file mode 100644 index 0000000000000..23ec30643e51e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log10.md @@ -0,0 +1,13 @@ + + +### LOG10 +Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. + +Logs of 0 and negative numbers return `null` as well as a warning. + +``` +ROW d = 1000.0 +| EVAL s = LOG10(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/ltrim.md b/docs/reference/esql/functions/kibana/docs/ltrim.md new file mode 100644 index 0000000000000..33fe7b8da1b6f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ltrim.md @@ -0,0 +1,7 @@ + + +### LTRIM +Removes leading whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_avg.md b/docs/reference/esql/functions/kibana/docs/mv_avg.md new file mode 100644 index 0000000000000..73636e07fa6e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_avg.md @@ -0,0 +1,7 @@ + + +### MV_AVG +Converts a multivalued field into a single valued field containing the average of all of the values. 
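mv_avg.md likewise lands without an example. A minimal sketch, assuming a hypothetical multivalued literal, would collapse the values into a single `double` average per row:

```
ROW a = [3, 5, 1, 6]
| EVAL avg_a = MV_AVG(a)
```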
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_concat.md b/docs/reference/esql/functions/kibana/docs/mv_concat.md new file mode 100644 index 0000000000000..f8092e47aaed0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_concat.md @@ -0,0 +1,7 @@ + + +### MV_CONCAT +Reduce a multivalued string field to a single valued field by concatenating all values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_count.md b/docs/reference/esql/functions/kibana/docs/mv_count.md new file mode 100644 index 0000000000000..ceea555d0d05c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_count.md @@ -0,0 +1,7 @@ + + +### MV_COUNT +Reduce a multivalued field to a single valued field containing the count of values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_dedupe.md b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md new file mode 100644 index 0000000000000..6968c4dd9b3a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md @@ -0,0 +1,7 @@ + + +### MV_DEDUPE +Remove duplicate values from a multivalued field. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_first.md b/docs/reference/esql/functions/kibana/docs/mv_first.md new file mode 100644 index 0000000000000..6ed8bb7570a93 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_first.md @@ -0,0 +1,7 @@ + + +### MV_FIRST +Reduce a multivalued field to a single valued field containing the first value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_last.md b/docs/reference/esql/functions/kibana/docs/mv_last.md new file mode 100644 index 0000000000000..5b68b84b4393f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_last.md @@ -0,0 +1,7 @@ + + +### MV_LAST +Reduce a multivalued field to a single valued field containing the last value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_max.md b/docs/reference/esql/functions/kibana/docs/mv_max.md new file mode 100644 index 0000000000000..acb29f7a592f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_max.md @@ -0,0 +1,7 @@ + + +### MV_MAX +Reduce a multivalued field to a single valued field containing the maximum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_median.md b/docs/reference/esql/functions/kibana/docs/mv_median.md new file mode 100644 index 0000000000000..81de2c3b2c689 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_median.md @@ -0,0 +1,7 @@ + + +### MV_MEDIAN +Converts a multivalued field into a single valued field containing the median value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_min.md b/docs/reference/esql/functions/kibana/docs/mv_min.md new file mode 100644 index 0000000000000..637211487a972 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_min.md @@ -0,0 +1,7 @@ + + +### MV_MIN +Reduce a multivalued field to a single valued field containing the minimum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_slice.md b/docs/reference/esql/functions/kibana/docs/mv_slice.md new file mode 100644 index 0000000000000..7bbf36f67079d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_slice.md @@ -0,0 +1,7 @@ + + +### MV_SLICE +Returns a subset of the multivalued field using the start and end index values. 
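A short MV_SLICE sketch with invented values, assuming zero-based start/end positions with the end included — `a1` yields 2 and `a2` yields [2, 3]:

```
ROW a = [1, 2, 2, 3]
| EVAL a1 = MV_SLICE(a, 1), a2 = MV_SLICE(a, 2, 3)
```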
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_sort.md b/docs/reference/esql/functions/kibana/docs/mv_sort.md new file mode 100644 index 0000000000000..65a74d0455f4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sort.md @@ -0,0 +1,7 @@ + + +### MV_SORT +Sorts a multivalued field in lexicographical order. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_sum.md b/docs/reference/esql/functions/kibana/docs/mv_sum.md new file mode 100644 index 0000000000000..a2b1bfb8ac481 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sum.md @@ -0,0 +1,7 @@ + + +### MV_SUM +Converts a multivalued field into a single valued field containing the sum of all of the values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_zip.md b/docs/reference/esql/functions/kibana/docs/mv_zip.md new file mode 100644 index 0000000000000..b6de218ecb45b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_zip.md @@ -0,0 +1,7 @@ + + +### MV_ZIP +Combines the values from two multivalued fields with a delimiter that joins them together. + diff --git a/docs/reference/esql/functions/kibana/docs/pi.md b/docs/reference/esql/functions/kibana/docs/pi.md new file mode 100644 index 0000000000000..f796ace56607d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pi.md @@ -0,0 +1,7 @@ + + +### PI +The ratio of a circle’s circumference to its diameter. + diff --git a/docs/reference/esql/functions/kibana/docs/pow.md b/docs/reference/esql/functions/kibana/docs/pow.md new file mode 100644 index 0000000000000..6cb9139dd91cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pow.md @@ -0,0 +1,7 @@ + + +### POW +Returns the value of a base raised to the power of an exponent. + diff --git a/docs/reference/esql/functions/kibana/docs/replace.md b/docs/reference/esql/functions/kibana/docs/replace.md new file mode 100644 index 0000000000000..9744a9ad7244b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/replace.md @@ -0,0 +1,7 @@ + + +### REPLACE +The function substitutes in the string any match of the regular expression with the replacement string. + diff --git a/docs/reference/esql/functions/kibana/docs/right.md b/docs/reference/esql/functions/kibana/docs/right.md new file mode 100644 index 0000000000000..6e211ae079f62 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/right.md @@ -0,0 +1,7 @@ + + +### RIGHT +Return the substring that extracts length chars from the string starting from the right. + diff --git a/docs/reference/esql/functions/kibana/docs/round.md b/docs/reference/esql/functions/kibana/docs/round.md new file mode 100644 index 0000000000000..2f8fd0864badf --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/round.md @@ -0,0 +1,7 @@ + + +### ROUND +Rounds a number to the closest number with the specified number of digits. + diff --git a/docs/reference/esql/functions/kibana/docs/rtrim.md b/docs/reference/esql/functions/kibana/docs/rtrim.md new file mode 100644 index 0000000000000..fc5636e40e804 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/rtrim.md @@ -0,0 +1,7 @@ + + +### RTRIM +Removes trailing whitespaces from a string. + diff --git a/docs/reference/esql/functions/kibana/docs/signum.md b/docs/reference/esql/functions/kibana/docs/signum.md new file mode 100644 index 0000000000000..f2e66b84c69c8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/signum.md @@ -0,0 +1,12 @@ + + +### SIGNUM +Returns the sign of the given number. 
+It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. + +``` +ROW d = 100.0 +| EVAL s = SIGNUM(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sin.md b/docs/reference/esql/functions/kibana/docs/sin.md new file mode 100644 index 0000000000000..a87b4e4f452af --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sin.md @@ -0,0 +1,11 @@ + + +### SIN +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL sin=SIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sinh.md b/docs/reference/esql/functions/kibana/docs/sinh.md new file mode 100644 index 0000000000000..81e8d9fd473d5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sinh.md @@ -0,0 +1,11 @@ + + +### SINH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. + +``` +ROW a=1.8 +| EVAL sinh=SINH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/split.md b/docs/reference/esql/functions/kibana/docs/split.md new file mode 100644 index 0000000000000..d06d8857967f4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/split.md @@ -0,0 +1,7 @@ + + +### SPLIT +Split a single valued string into multiple strings. + diff --git a/docs/reference/esql/functions/kibana/docs/sqrt.md b/docs/reference/esql/functions/kibana/docs/sqrt.md new file mode 100644 index 0000000000000..6e52bfed4037b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sqrt.md @@ -0,0 +1,7 @@ + + +### SQRT +Returns the square root of a number. + diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md new file mode 100644 index 0000000000000..6e23bb9b0f116 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -0,0 +1,12 @@ + + +### ST_CONTAINS +Returns whether the first geometry contains the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md new file mode 100644 index 0000000000000..7cf66b168bd70 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -0,0 +1,12 @@ + + +### ST_DISJOINT +Returns whether the two geometries or geometry columns are disjoint. + +``` +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md new file mode 100644 index 0000000000000..e4db33429dbe3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -0,0 +1,11 @@ + + +### ST_INTERSECTS +Returns whether the two geometries or geometry columns intersect. 
+ +``` +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md new file mode 100644 index 0000000000000..cbb3ae5ee9aca --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -0,0 +1,12 @@ + + +### ST_WITHIN +Returns whether the first geometry is within the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md new file mode 100644 index 0000000000000..af2f4de1487cd --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -0,0 +1,7 @@ + + +### ST_X +Extracts the x-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md new file mode 100644 index 0000000000000..575a5bd3c7d33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -0,0 +1,7 @@ + + +### ST_Y +Extracts the y-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/starts_with.md b/docs/reference/esql/functions/kibana/docs/starts_with.md new file mode 100644 index 0000000000000..5af544c855051 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/starts_with.md @@ -0,0 +1,7 @@ + + +### STARTS_WITH +Returns a boolean that indicates whether a keyword string starts with another string + diff --git a/docs/reference/esql/functions/kibana/docs/substring.md b/docs/reference/esql/functions/kibana/docs/substring.md new file mode 100644 index 0000000000000..d1d9c696f7813 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/substring.md @@ -0,0 +1,7 @@ + + +### SUBSTRING +Returns a substring of a string, specified by a start position and an optional length + diff --git a/docs/reference/esql/functions/kibana/docs/tan.md b/docs/reference/esql/functions/kibana/docs/tan.md new file mode 100644 index 0000000000000..edfb4210f7dd2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tan.md @@ -0,0 +1,11 @@ + + +### TAN +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL tan=TAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tanh.md b/docs/reference/esql/functions/kibana/docs/tanh.md new file mode 100644 index 0000000000000..d3d8c7d4e9196 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tanh.md @@ -0,0 +1,11 @@ + + +### TANH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic tangent] of an angle. + +``` +ROW a=1.8 +| EVAL tanh=TANH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tau.md b/docs/reference/esql/functions/kibana/docs/tau.md new file mode 100644 index 0000000000000..9a530e61dd342 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tau.md @@ -0,0 +1,7 @@ + + +### TAU +The ratio of a circle’s circumference to its radius. 
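Since TAU takes no arguments, a sketch in the style of the other generated snippets is just the bare constant, evaluating to roughly 6.283:

```
ROW TAU()
```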
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_boolean.md b/docs/reference/esql/functions/kibana/docs/to_boolean.md new file mode 100644 index 0000000000000..9c1bd747d168f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_boolean.md @@ -0,0 +1,7 @@ + + +### TO_BOOLEAN +Converts an input value to a boolean value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md new file mode 100644 index 0000000000000..dbaa76d1d23e0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANPOINT +Converts an input value to a point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md new file mode 100644 index 0000000000000..e3fd29e8f9907 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANSHAPE +Converts an input value to a shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_datetime.md b/docs/reference/esql/functions/kibana/docs/to_datetime.md new file mode 100644 index 0000000000000..8326866c7166d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_datetime.md @@ -0,0 +1,7 @@ + + +### TO_DATETIME +Converts an input value to a date value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_degrees.md b/docs/reference/esql/functions/kibana/docs/to_degrees.md new file mode 100644 index 0000000000000..dc5e36a592b2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_degrees.md @@ -0,0 +1,7 @@ + + +### TO_DEGREES +Converts a number in radians to degrees. + diff --git a/docs/reference/esql/functions/kibana/docs/to_double.md b/docs/reference/esql/functions/kibana/docs/to_double.md new file mode 100644 index 0000000000000..4f531e1c8fdde --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_double.md @@ -0,0 +1,7 @@ + + +### TO_DOUBLE +Converts an input value to a double value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geopoint.md b/docs/reference/esql/functions/kibana/docs/to_geopoint.md new file mode 100644 index 0000000000000..7f9b8ca59bc8f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geopoint.md @@ -0,0 +1,7 @@ + + +### TO_GEOPOINT +Converts an input value to a geo_point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geoshape.md b/docs/reference/esql/functions/kibana/docs/to_geoshape.md new file mode 100644 index 0000000000000..cdfbdc5b6ffd9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geoshape.md @@ -0,0 +1,7 @@ + + +### TO_GEOSHAPE +Converts an input value to a geo_shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_integer.md b/docs/reference/esql/functions/kibana/docs/to_integer.md new file mode 100644 index 0000000000000..ad04ecbd1e304 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_integer.md @@ -0,0 +1,7 @@ + + +### TO_INTEGER +Converts an input value to an integer value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_ip.md b/docs/reference/esql/functions/kibana/docs/to_ip.md new file mode 100644 index 0000000000000..47d06e9ab755e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_ip.md @@ -0,0 +1,7 @@ + + +### TO_IP +Converts an input string to an IP value. 
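A minimal TO_IP sketch; the address string is invented for illustration:

```
ROW str = "1.2.3.4"
| EVAL ip = TO_IP(str)
```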
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_long.md b/docs/reference/esql/functions/kibana/docs/to_long.md new file mode 100644 index 0000000000000..c19273376bd4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_long.md @@ -0,0 +1,7 @@ + + +### TO_LONG +Converts an input value to a long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_lower.md b/docs/reference/esql/functions/kibana/docs/to_lower.md new file mode 100644 index 0000000000000..f63926ba13825 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_lower.md @@ -0,0 +1,7 @@ + + +### TO_LOWER +Returns a new string representing the input string converted to lower case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_radians.md b/docs/reference/esql/functions/kibana/docs/to_radians.md new file mode 100644 index 0000000000000..071d9ff05e0b6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_radians.md @@ -0,0 +1,7 @@ + + +### TO_RADIANS +Converts a number in degrees to radians. + diff --git a/docs/reference/esql/functions/kibana/docs/to_string.md b/docs/reference/esql/functions/kibana/docs/to_string.md new file mode 100644 index 0000000000000..a066f488363aa --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_string.md @@ -0,0 +1,7 @@ + + +### TO_STRING +Converts a field into a string. + diff --git a/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md new file mode 100644 index 0000000000000..fbe9e22215ee8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md @@ -0,0 +1,7 @@ + + +### TO_UNSIGNED_LONG +Converts an input value to an unsigned long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_upper.md b/docs/reference/esql/functions/kibana/docs/to_upper.md new file mode 100644 index 0000000000000..4c4f5fe02b646 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_upper.md @@ -0,0 +1,7 @@ + + +### TO_UPPER +Returns a new string representing the input string converted to upper case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_version.md b/docs/reference/esql/functions/kibana/docs/to_version.md new file mode 100644 index 0000000000000..23cd9fcb152a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_version.md @@ -0,0 +1,7 @@ + + +### TO_VERSION +Converts an input string to a version value. + diff --git a/docs/reference/esql/functions/kibana/docs/trim.md b/docs/reference/esql/functions/kibana/docs/trim.md new file mode 100644 index 0000000000000..2911abbf5e1a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/trim.md @@ -0,0 +1,7 @@ + + +### TRIM +Removes leading and trailing whitespaces from a string. 
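A minimal TRIM sketch with an invented padded string:

```
ROW message = "   some text  "
| EVAL message = TRIM(message)
```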
+ diff --git a/docs/reference/esql/functions/signature/case.svg b/docs/reference/esql/functions/signature/case.svg new file mode 100644 index 0000000000000..d6fd7da38aca6 --- /dev/null +++ b/docs/reference/esql/functions/signature/case.svg @@ -0,0 +1 @@ +CASE(condition,trueValue) \ No newline at end of file diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 87ef4dd0b3eff..86245e1c93e97 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -70,27 +70,50 @@ tasks.named("test").configure { doLast { List signatures = file("${projectDir}/build/testrun/test/temp/esql/functions/signature").list().findAll {it.endsWith("svg")} List types = file("${projectDir}/build/testrun/test/temp/esql/functions/types").list().findAll {it.endsWith("asciidoc")} - int count = signatures == null ? 0 : signatures.size() + int count = types == null ? 0 : types.size() + Closure readExample = line -> { + line.replaceAll(/read-example::([^\[]+)\[tag=([^,\]]+)(, ?json)?\]/, { + String file = it[1] + String tag = it[2] + boolean isJson = it[3] + String allExamples = new File("${projectDir}/qa/testFixtures/src/main/resources/${file}").text + int start = allExamples.indexOf("tag::${tag}[]") + int end = allExamples.indexOf("end::${tag}[]", start) + if (start < 0 || end < 0) { + throw new IllegalAccessException("can't find example ${file}::${tag}") + } + // Slice out the newlines + start = allExamples.indexOf('\n', start) + 1 + end = allExamples.lastIndexOf('\n', end) + String example = allExamples.substring(start, end) + if (isJson) { + example = example.replace("\"", "\\\"").replace("\n", "\\n") + } + return example; + }) + } if (count == 0) { logger.quiet("ESQL Docs: No function signatures created. Skipping sync.") } else if (count == 1) { - logger.quiet("ESQL Docs: Only updated $signatures and $types, patching them into place") + logger.quiet("ESQL Docs: Only files related to $types, patching them into place") project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { - include '/*.asciidoc', '**/*.asciidoc', '**/*.svg', 'README.md' + include '/*.asciidoc', '**/*.asciidoc', '**/*.md', '**/*.json', '**/*.svg', 'README.md' } + filter readExample } } else { project.sync { from "${projectDir}/build/testrun/test/temp/esql/functions" into "${rootDir}/docs/reference/esql/functions" - include '**/*.asciidoc', '**/*.svg' + include '**/*.asciidoc', '**/*.svg', '**/*.md', '**/*.json' preserve { include '/*.asciidoc', 'README.md' } + filter readExample } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 178c714950b05..a1a7c95ece2f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -108,6 +108,7 @@ import java.lang.reflect.Constructor; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -245,7 +246,21 @@ public static String normalizeName(String name) { return name.toLowerCase(Locale.ROOT); } - public record ArgSignature(String name, String[] type, String description, boolean 
optional) {} + public record ArgSignature(String name, String[] type, String description, boolean optional) { + @Override + public String toString() { + return "ArgSignature{" + + "name='" + + name + + "', type=" + + Arrays.toString(type) + + ", description='" + + description + + "', optional=" + + optional + + '}'; + } + } public record FunctionDescription( String name, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 889dfbf4c9b17..bc7a67d9eaefa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.esql.expression.function; +import com.carrotsearch.randomizedtesting.ClassModel; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; @@ -33,7 +36,9 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; @@ -61,14 +66,11 @@ import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.rules.TestRule; -import org.junit.runner.Description; -import org.junit.runners.model.Statement; import java.io.IOException; import java.io.UncheckedIOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -99,6 +101,7 @@ import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -524,45 +527,28 @@ public void testSerializationOfSimple() { assertSerialization(buildFieldExpression(testCase)); } - private static boolean ranAllTests = false; - - @ClassRule - public static TestRule rule = new TestRule() { - @Override - public Statement apply(Statement base, Description description) { - for (Description d : description.getChildren()) { - if (d.getChildren().size() > 1) { - ranAllTests = true; - return base; - } - } - return base; - } - }; - @AfterClass public static void testFunctionInfo() { - if (ranAllTests == false) { - LogManager.getLogger(getTestClass()).info("Skipping function info checks because we're running a portion of the tests"); - return; - } + Logger log = LogManager.getLogger(getTestClass()); FunctionDefinition definition = definition(functionName()); if (definition == null) { - LogManager.getLogger(getTestClass()).info("Skipping function info checks because the function isn't registered"); + log.info("Skipping 
function info checks because the function isn't registered"); return; } - LogManager.getLogger(getTestClass()).info("Running function info checks"); + // TODO fix case tests to include all supported types + assumeFalse("CASE test incomplete", definition.name().equals("case")); + log.info("Running function info checks"); EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); List args = description.args(); - assertTrue("expect description to be defined", description.description() != null && description.description().length() > 0); + assertTrue("expect description to be defined", description.description() != null && false == description.description().isEmpty()); List> typesFromSignature = new ArrayList<>(); Set returnFromSignature = new HashSet<>(); for (int i = 0; i < args.size(); i++) { typesFromSignature.add(new HashSet<>()); } - for (Map.Entry, DataType> entry : signatures.entrySet()) { + for (Map.Entry, DataType> entry : signatures().entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { typesFromSignature.get(i).add(signatureType(types.get(i))); @@ -571,15 +557,18 @@ public static void testFunctionInfo() { } for (int i = 0; i < args.size(); i++) { - Set annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>())); + EsqlFunctionRegistry.ArgSignature arg = args.get(i); + Set annotationTypes = Arrays.stream(arg.type()).collect(Collectors.toCollection(TreeSet::new)); Set signatureTypes = typesFromSignature.get(i); if (signatureTypes.isEmpty()) { + log.info("{}: skipping", arg.name()); continue; } + log.info("{}: tested {} vs annotated {}", arg.name(), signatureTypes, annotationTypes); assertEquals(signatureTypes, annotationTypes); } - Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>())); + Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); } @@ -994,10 +983,6 @@ public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { return; } - if (ranAllTests == false) { - LogManager.getLogger(getTestClass()).info("Skipping rendering signature because we're running a portion of the tests"); - return; - } String rendered = buildSignatureSvg(functionName()); if (rendered == null) { LogManager.getLogger(getTestClass()).info("Skipping rendering signature because the function isn't registered"); @@ -1023,37 +1008,39 @@ private static String buildSignatureSvg(String name) throws IOException { return null; } + private static Class classGeneratingSignatures = null; /** - * Unique signatures encountered by this test. - *
<p>
    - * We clear this at the beginning of the test class with - * {@link #clearSignatures} out of paranoia. It is - * shared by many tests, after all. - *
</p>
    - *
<p>
    - * After each test method we add the signature it operated on via - * {@link #trackSignature}. Once the test class is done we render - * all the unique signatures to a temp file with {@link #renderTypes}. - * We use a temp file because that's all we're allowed to write to. - * Gradle will move the files into the docs after this is done. - *
</p>
    + * Unique signatures in this test's parameters. */ - private static final Map, DataType> signatures = new HashMap<>(); - - @BeforeClass - public static void clearSignatures() { - signatures.clear(); - } + private static Map, DataType> signatures; - @After - public void trackSignature() { - if (testCase.getExpectedTypeError() != null) { - return; + private static Map, DataType> signatures() { + Class testClass = getTestClass(); + if (signatures != null && classGeneratingSignatures == testClass) { + return signatures; } - if (testCase.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { - return; + signatures = new HashMap<>(); + Set paramsFactories = new ClassModel(testClass).getAnnotatedLeafMethods(ParametersFactory.class).keySet(); + assertThat(paramsFactories, hasSize(1)); + Method paramsFactory = paramsFactories.iterator().next(); + List params; + try { + params = (List) paramsFactory.invoke(null); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); } - signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType()); + for (Object p : params) { + TestCaseSupplier tcs = (TestCaseSupplier) ((Object[]) p)[0]; + TestCaseSupplier.TestCase tc = tcs.get(); + if (tc.getExpectedTypeError() != null) { + continue; + } + if (tc.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { + continue; + } + signatures.putIfAbsent(tc.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), tc.expectedType()); + } + return signatures; } @AfterClass @@ -1079,6 +1066,17 @@ public static void renderDocs() throws IOException { renderDescription(description.description(), info.note()); boolean hasExamples = renderExamples(info); renderFullLayout(name, hasExamples); + renderKibanaInlineDocs(name, info); + List args = description.args(); + if (name.equals("case")) { + EsqlFunctionRegistry.ArgSignature falseValue = args.get(1); + args = List.of( + args.get(0), + falseValue, + new EsqlFunctionRegistry.ArgSignature("falseValue", falseValue.type(), falseValue.description(), true) + ); + } + renderKibanaFunctionDefinition(name, info, args, description.variadic()); return; } LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); @@ -1095,7 +1093,7 @@ private static void renderTypes(List argNames) throws IOException { header.append("result"); List table = new ArrayList<>(); - for (Map.Entry, DataType> sig : signatures.entrySet()) { + for (Map.Entry, DataType> sig : signatures().entrySet()) { // TODO flip to using sortedSignatures if (sig.getKey().size() != argNames.size()) { continue; } @@ -1198,6 +1196,130 @@ private static void renderFullLayout(String name, boolean hasExamples) throws IO writeToTempDir("layout", rendered, "asciidoc"); } + private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append(""" + + + """); + builder.append("### ").append(name.toUpperCase(Locale.ROOT)).append("\n"); + builder.append(info.description()).append("\n\n"); + + if (info.examples().length > 0) { + Example example = info.examples()[0]; + builder.append("```\n"); + builder.append("read-example::").append(example.file()).append(".csv-spec[tag=").append(example.tag()).append("]\n"); + builder.append("```\n"); + } + if (Strings.isNullOrEmpty(info.note()) == false) { + builder.append("Note: ").append(info.note()).append("\n"); + } + 
String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", functionName(), rendered); + writeToTempDir("kibana/docs", rendered, "md"); + } + + private static void renderKibanaFunctionDefinition( + String name, + FunctionInfo info, + List args, + boolean variadic + ) throws IOException { + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint().lfAtEnd().startObject(); + builder.field( + "comment", + "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it." + ); + builder.field("type", "eval"); // TODO aggs in here too + builder.field("name", name); + builder.field("description", removeAsciidocLinks(info.description())); + if (Strings.isNullOrEmpty(info.note()) == false) { + builder.field("note", removeAsciidocLinks(info.note())); + } + // TODO aliases + + builder.startArray("signatures"); + if (args.isEmpty()) { + builder.startObject(); + builder.startArray("params"); + builder.endArray(); + // There should only be one return type so just use that as the example + builder.field("returnType", signatures().values().iterator().next().typeName()); + builder.endObject(); + } else { + int minArgCount = (int) args.stream().filter(a -> false == a.optional()).count(); + for (Map.Entry, DataType> sig : sortedSignatures()) { + if (variadic && sig.getKey().size() > args.size()) { + // For variadic functions we test much longer signatures, let's just stop at the last one + continue; + } + // TODO make constants for auto_bucket so the signatures get recognized + if (name.equals("auto_bucket") == false && sig.getKey().size() < minArgCount) { + throw new IllegalArgumentException("signature " + sig.getKey() + " is missing non-optional arg for " + args); + } + builder.startObject(); + builder.startArray("params"); + for (int i = 0; i < sig.getKey().size(); i++) { + EsqlFunctionRegistry.ArgSignature arg = args.get(i); + builder.startObject(); + builder.field("name", arg.name()); + builder.field("type", sig.getKey().get(i).typeName()); + builder.field("optional", arg.optional()); + builder.field("description", arg.description()); + builder.endObject(); + } + builder.endArray(); + builder.field("variadic", variadic); + builder.field("returnType", sig.getValue().typeName()); + builder.endObject(); + } + } + builder.endArray(); + + if (info.examples().length > 0) { + builder.startArray("examples"); + for (Example example : info.examples()) { + builder.value("read-example::" + example.file() + ".csv-spec[tag=" + example.tag() + ", json]"); + } + builder.endArray(); + } + + String rendered = Strings.toString(builder.endObject()); + LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", functionName(), rendered); + writeToTempDir("kibana/definition", rendered, "json"); + } + + private static String removeAsciidocLinks(String asciidoc) { + return asciidoc.replaceAll("[^ ]+\\[([^\\]]+)\\]", "$1"); + } + + private static List, DataType>> sortedSignatures() { + List, DataType>> sortedSignatures = new ArrayList<>(signatures().entrySet()); + Collections.sort(sortedSignatures, new Comparator<>() { + @Override + public int compare(Map.Entry, DataType> lhs, Map.Entry, DataType> rhs) { + int maxlen = Math.max(lhs.getKey().size(), rhs.getKey().size()); + for (int i = 0; i < maxlen; i++) { + if (lhs.getKey().size() <= i) { + return -1; + } + if (rhs.getKey().size() <= i) { + return 1; + } + int c = 
lhs.getKey().get(i).typeName().compareTo(rhs.getKey().get(i).typeName()); + if (c != 0) { + return c; + } + } + return lhs.getValue().typeName().compareTo(rhs.getValue().typeName()); + } + }); + return sortedSignatures; + } + protected static String functionName() { Class testClass = getTestClass(); if (testClass.isAnnotationPresent(FunctionName.class)) { @@ -1262,7 +1384,7 @@ private static void writeToTempDir(String subdir, String str, String extension) Files.createDirectories(dir); Path file = dir.resolve(functionName() + "." + extension); Files.writeString(file, str); - LogManager.getLogger(getTestClass()).info("Wrote function types for [{}] to file: {}", functionName(), file); + LogManager.getLogger(getTestClass()).info("Wrote to file: {}", file); } private final List breakers = Collections.synchronizedList(new ArrayList<>()); From 12398ee6a9ea703dac0ea752628214d556f6c221 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 9 Apr 2024 21:37:20 +0200 Subject: [PATCH 115/173] Don't overwrite `DataStream.rolloverOnWrite` flag on failure store rollover (#107247) --- .../java/org/elasticsearch/cluster/metadata/DataStream.java | 2 +- .../org/elasticsearch/cluster/metadata/DataStreamTests.java | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 364a1b31ceeba..d4fd57427793b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -527,7 +527,7 @@ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) lifecycle, failureStore, failureIndices, - false, + rolloverOnWrite, autoShardingEvent ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index f086b52c1b491..d54fcbd8a9e41 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -177,6 +177,8 @@ public void testRollover() { assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + // Irrespective of whether the rollover was performed lazily, rolloverOnWrite should always be set to false after rollover. + assertFalse(rolledDs.rolloverOnWrite()); } public void testRolloverWithConflictingBackingIndexName() { @@ -272,6 +274,8 @@ public void testRolloverFailureStore() { assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); + // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store. 
+ assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite())); assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); From 6ff6dc1fb4a344deea3ad8f858a2c051c8880bb8 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Tue, 9 Apr 2024 18:16:42 -0400 Subject: [PATCH 116/173] [Transform] Only trigger action once per thread (#107232) TransformScheduler can trigger its tasks on multiple threads. TransformTask uses an AtomicReference to manage one trigger event per thread by cycling between "Started" and "Indexing". The Retry Listener now has the same protection. "shouldRunAction" will cycle to false during execution and back to true if the action fails and should be retried. Fix #107215 --- docs/changelog/107232.yaml | 6 ++ .../TransformRetryableStartUpListener.java | 21 +++--- ...ransformRetryableStartUpListenerTests.java | 64 +++++++++++++++++++ 3 files changed, 80 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/107232.yaml diff --git a/docs/changelog/107232.yaml b/docs/changelog/107232.yaml new file mode 100644 index 0000000000000..1422848cb1c91 --- /dev/null +++ b/docs/changelog/107232.yaml @@ -0,0 +1,6 @@ +pr: 107232 +summary: Only trigger action once per thread +area: Transform +type: bug +issues: + - 107215 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java index 17548fd8d427f..33b20d5513bc5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java @@ -22,7 +22,7 @@ class TransformRetryableStartUpListener implements TransformScheduler. private final Supplier shouldRetry; private final TransformContext context; private final AtomicBoolean isFirstRun; - private final AtomicBoolean isRunning; + private final AtomicBoolean shouldRunAction; /** * @param transformId the transform associated with this listener. All events to this listener must be for the same transformId. @@ -53,30 +53,28 @@ class TransformRetryableStartUpListener implements TransformScheduler. 
this.shouldRetry = shouldRetry; this.context = context; this.isFirstRun = new AtomicBoolean(true); - this.isRunning = new AtomicBoolean(true); + this.shouldRunAction = new AtomicBoolean(true); } @Override public void triggered(TransformScheduler.Event event) { - if (isRunning.get() && transformId.equals(event.transformId())) { + if (transformId.equals(event.transformId()) && shouldRunAction.compareAndSet(true, false)) { action.accept(ActionListener.wrap(this::actionSucceeded, this::actionFailed)); } } - private void markDone() { - if (isRunning.compareAndSet(true, false)) { - synchronized (context) { - context.resetStartUpFailureCount(); - } - } - } - private void actionSucceeded(Response r) { maybeNotifyRetryListener(false); markDone(); actionListener.onResponse(r); } + private void markDone() { + synchronized (context) { + context.resetStartUpFailureCount(); + } + } + private void maybeNotifyRetryListener(boolean response) { if (isFirstRun.compareAndSet(true, false)) { retryScheduledListener.onResponse(response); @@ -87,6 +85,7 @@ private void actionFailed(Exception e) { if (shouldRetry.get()) { maybeNotifyRetryListener(true); recordError(e); + shouldRunAction.set(true); } else { maybeNotifyRetryListener(false); markDone(); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java index 1a2bbfd434455..77b290e015d9a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java @@ -18,6 +18,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.only; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -236,4 +237,67 @@ public void testCancelRetryImmediately() { assertFalse("Retries should not be scheduled.", retryResult.get()); verify(context, only()).resetStartUpFailureCount(); } + + /** + * Given triggered has been called + * When we call trigger a second time + * And the first call has not finished + * Then we should not take any action + * + * Given the first call has finished + * When we call trigger a third time + * Then we should successfully call the action + */ + public void testRunOneAtATime() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var savedListener = new AtomicReference>(); + Consumer> action = l -> { + if (savedListener.compareAndSet(null, l) == false) { + fail("Action should only be called once."); + } + }; + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + action, + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // verify the action has been called + assertNotNull(savedListener.get()); + + // assert the listener has not been called yet + assertEquals("Response Listener should never be called once.", 0, responseResult.get()); + assertNull("Retry Listener should not be called.", retryResult.get()); + verifyNoInteractions(context); + + savedListener.get().onFailure(new IllegalStateException("first 
call fails")); + + // assert only 1 retry and 0 success + assertEquals("Response Listener should only be called once.", 0, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, never()).resetStartUpFailureCount(); + + // rerun and succeed + savedListener.set(null); + callThreeTimes("transformId", listener); + savedListener.get().onResponse(null); + + // assert only 1 retry and 1 failure + assertEquals("Response Listener should only be called once.", 1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, times(1)).resetStartUpFailureCount(); + } } From 13f95fdcec25bafe699ca9d0130970d7d86a6a06 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 10 Apr 2024 07:27:59 +0200 Subject: [PATCH 117/173] [Profiling] Divide into more packages (#107201) With this commit we split the Universal Profiling plugin into three packages: * `persistence` contains everything w.r.t index management * `rest` contains the REST API * `action` contains the transport API The `action` / `rest` structure follows the already established structure in the rest of the code base. We divide this plugin into multiple packages mainly because the different functionalities will be maintained by different teams in the future. This restructuring helps clarify boundaries. --- .../{ => action}/CancellationIT.java | 2 +- .../{ => action}/GetFlameGraphActionIT.java | 2 +- .../{ => action}/GetStackTracesActionIT.java | 2 +- .../{ => action}/GetStatusActionIT.java | 2 +- .../GetTopNFunctionsActionIT.java | 2 +- .../LocalStateProfilingXPackPlugin.java | 3 +- .../{ => action}/ProfilingTestCase.java | 4 ++- .../xpack/profiling/ProfilingPlugin.java | 19 ++++++++++++ .../profiling/{ => action}/CO2Calculator.java | 2 +- .../{ => action}/CloudProviders.java | 2 +- .../{ => action}/CostCalculator.java | 2 +- .../profiling/{ => action}/CostEntry.java | 2 +- .../xpack/profiling/{ => action}/Frame.java | 2 +- .../profiling/{ => action}/FrameGroupID.java | 2 +- .../{ => action}/GetFlamegraphAction.java | 2 +- .../{ => action}/GetFlamegraphResponse.java | 2 +- .../{ => action}/GetStackTracesAction.java | 2 +- .../{ => action}/GetStackTracesRequest.java | 3 +- .../{ => action}/GetStackTracesResponse.java | 2 +- .../GetStackTracesResponseBuilder.java | 2 +- .../{ => action}/GetStatusAction.java | 2 +- .../{ => action}/GetTopNFunctionsAction.java | 2 +- .../GetTopNFunctionsResponse.java | 2 +- .../profiling/{ => action}/HostMetadata.java | 2 +- .../{ => action}/IndexAllocation.java | 2 +- .../profiling/{ => action}/InstanceType.java | 2 +- .../{ => action}/InstanceTypeService.java | 2 +- .../{ => action}/KvIndexResolver.java | 2 +- .../profiling/{ => action}/NumberUtils.java | 2 +- .../ProfilingInfoTransportAction.java | 2 +- .../{ => action}/ProfilingLicenseChecker.java | 2 +- .../ProfilingUsageTransportAction.java | 2 +- .../profiling/{ => action}/Resampler.java | 2 +- .../profiling/{ => action}/StackFrame.java | 2 +- .../profiling/{ => action}/StackTrace.java | 2 +- .../profiling/{ => action}/StopWatch.java | 2 +- .../profiling/{ => action}/TopNFunction.java | 2 +- .../profiling/{ => 
action}/TraceEvent.java | 2 +- .../TransportGetFlamegraphAction.java | 2 +- .../TransportGetStackTracesAction.java | 4 ++- .../TransportGetStatusAction.java | 8 ++++- .../TransportGetTopNFunctionsAction.java | 2 +- .../AbstractProfilingPersistenceManager.java | 2 +- .../{ => persistence}/EventsIndex.java | 2 +- .../{ => persistence}/IndexState.java | 2 +- .../{ => persistence}/IndexStateResolver.java | 6 ++-- .../{ => persistence}/IndexStatus.java | 2 +- .../{ => persistence}/Migration.java | 2 +- .../ProfilingDataStreamManager.java | 11 +++++-- .../ProfilingIndexAbstraction.java | 2 +- .../ProfilingIndexManager.java | 23 +++++++++------ .../ProfilingIndexTemplateRegistry.java | 2 +- .../{ => rest}/RestGetFlamegraphAction.java | 4 ++- .../{ => rest}/RestGetStackTracesAction.java | 4 ++- .../{ => rest}/RestGetStatusAction.java | 3 +- .../RestGetTopNFunctionsAction.java | 4 ++- .../{ => action}/CO2CalculatorTests.java | 2 +- .../{ => action}/CarthesianCombinator.java | 2 +- .../{ => action}/CostCalculatorTests.java | 2 +- .../{ => action}/FrameGroupIDTests.java | 2 +- .../GetStackTracesRequestTests.java | 2 +- .../GetStackTracesResponseTests.java | 2 +- .../{ => action}/HostMetadataTests.java | 2 +- .../{ => action}/IndexAllocationTests.java | 2 +- .../{ => action}/KvIndexResolverTests.java | 2 +- .../{ => action}/NumberUtilsTests.java | 2 +- .../ProfilingInfoTransportActionTests.java | 2 +- .../{ => action}/ResamplerTests.java | 2 +- .../{ => action}/StackFrameTests.java | 2 +- .../{ => action}/StackTraceTests.java | 2 +- .../{ => action}/TopNFunctionTests.java | 2 +- .../TransportGetFlamegraphActionTests.java | 2 +- .../TransportGetStackTracesActionTests.java | 2 +- .../TransportGetTopNFunctionsActionTests.java | 2 +- .../{ => persistence}/EventsIndexTests.java | 2 +- .../ProfilingDataStreamManagerTests.java | 2 +- .../ProfilingIndexManagerTests.java | 2 +- .../ProfilingIndexTemplateRegistryTests.java | 2 +- .../{ => persistence}/VerifyingClient.java | 4 +-- .../RestGetStackTracesActionTests.java | 29 ++++++++----------- 80 files changed, 151 insertions(+), 108 deletions(-) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/CancellationIT.java (99%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlameGraphActionIT.java (96%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesActionIT.java (99%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetStatusActionIT.java (98%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsActionIT.java (98%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/LocalStateProfilingXPackPlugin.java (89%) rename x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingTestCase.java (97%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CO2Calculator.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CloudProviders.java (99%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CostCalculator.java (98%) rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/CostEntry.java (95%) 
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/Frame.java (88%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/FrameGroupID.java (96%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlamegraphAction.java (92%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetFlamegraphResponse.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesAction.java (92%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesRequest.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponse.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponseBuilder.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetStatusAction.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsAction.java (92%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/GetTopNFunctionsResponse.java (97%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/HostMetadata.java (98%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/IndexAllocation.java (97%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/InstanceType.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/InstanceTypeService.java (98%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/KvIndexResolver.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/NumberUtils.java (95%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingInfoTransportAction.java (96%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingLicenseChecker.java (96%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingUsageTransportAction.java (97%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/Resampler.java (97%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StackFrame.java (98%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StackTrace.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/StopWatch.java (94%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TopNFunction.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TraceEvent.java (96%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetFlamegraphAction.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStackTracesAction.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStatusAction.java (94%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetTopNFunctionsAction.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/AbstractProfilingPersistenceManager.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/EventsIndex.java (98%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexState.java (95%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexStateResolver.java (97%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/IndexStatus.java (92%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/Migration.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingDataStreamManager.java (96%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexAbstraction.java (94%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexManager.java (95%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexTemplateRegistry.java (99%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetFlamegraphAction.java (90%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStackTracesAction.java (90%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStatusAction.java (93%)
 rename x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetTopNFunctionsAction.java (90%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CO2CalculatorTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CarthesianCombinator.java (97%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/CostCalculatorTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/FrameGroupIDTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesRequestTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/GetStackTracesResponseTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/HostMetadataTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/IndexAllocationTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/KvIndexResolverTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/NumberUtilsTests.java (95%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/ProfilingInfoTransportActionTests.java (97%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/ResamplerTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/StackFrameTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/StackTraceTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TopNFunctionTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetFlamegraphActionTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetStackTracesActionTests.java (98%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => action}/TransportGetTopNFunctionsActionTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/EventsIndexTests.java (97%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingDataStreamManagerTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexManagerTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/ProfilingIndexTemplateRegistryTests.java (99%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => persistence}/VerifyingClient.java (94%)
 rename x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/{ => rest}/RestGetStackTracesActionTests.java (87%)

diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java
similarity index 99%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java
index ef5198499ff09..183ef3786a62d 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/CancellationIT.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java
similarity index 96%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java
index 20519d53459ba..49a5cfa7ca067 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 public class GetFlameGraphActionIT extends ProfilingTestCase {
     public void testGetStackTracesUnfiltered() throws Exception {
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java
similarity index 99%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java
index 30de2173e8903..9de148c33c467 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java
similarity index 98%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java
index f3417dbf5d472..27fe2b8acb79b 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.RestStatus;
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java
similarity index 98%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java
index 05d0e1cb0471b..ab5bbc3812eb5 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsActionIT.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsActionIT.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java
similarity index 89%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java
index 3a033e2686b2b..1953007a6c39a 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/LocalStateProfilingXPackPlugin.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/LocalStateProfilingXPackPlugin.java
@@ -5,10 +5,11 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
+import org.elasticsearch.xpack.profiling.ProfilingPlugin;
 import java.nio.file.Path;
diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java
similarity index 97%
rename from x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java
rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java
index 58b018a13e096..67825f6ce8570 100644
--- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java
+++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/ProfilingTestCase.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
@@ -25,6 +25,8 @@ import org.elasticsearch.xpack.core.ilm.LifecycleSettings;
 import org.elasticsearch.xpack.countedkeyword.CountedKeywordMapperPlugin;
 import org.elasticsearch.xpack.ilm.IndexLifecycle;
+import org.elasticsearch.xpack.profiling.ProfilingPlugin;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager;
 import org.elasticsearch.xpack.unsignedlong.UnsignedLongMapperPlugin;
 import org.elasticsearch.xpack.versionfield.VersionFieldPlugin;
 import org.junit.After;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
index 0615bef7a4980..6962dedb734ae 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java
@@ -35,6 +35,25 @@ import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
 import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
+import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction;
+import org.elasticsearch.xpack.profiling.action.GetStackTracesAction;
+import org.elasticsearch.xpack.profiling.action.GetStatusAction;
+import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction;
+import org.elasticsearch.xpack.profiling.action.ProfilingInfoTransportAction;
+import org.elasticsearch.xpack.profiling.action.ProfilingLicenseChecker;
+import org.elasticsearch.xpack.profiling.action.ProfilingUsageTransportAction;
+import org.elasticsearch.xpack.profiling.action.TransportGetFlamegraphAction;
+import org.elasticsearch.xpack.profiling.action.TransportGetStackTracesAction;
+import org.elasticsearch.xpack.profiling.action.TransportGetStatusAction;
+import org.elasticsearch.xpack.profiling.action.TransportGetTopNFunctionsAction;
+import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry;
+import org.elasticsearch.xpack.profiling.rest.RestGetFlamegraphAction;
+import org.elasticsearch.xpack.profiling.rest.RestGetStackTracesAction;
+import org.elasticsearch.xpack.profiling.rest.RestGetStatusAction;
+import org.elasticsearch.xpack.profiling.rest.RestGetTopNFunctionsAction;
 import java.util.ArrayList;
 import java.util.Collection;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
index fcdc116cab725..398a004edd448 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.core.UpdateForV9;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java
index 0245df13f8fad..de2feb727a029 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CloudProviders.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CloudProviders.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.util.Map;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java
index 3db9b543bdb88..b8ee54f5f29e8 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.util.Map;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java
similarity index 95%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java
index b6795294e7f06..ded99eec428f2 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostEntry.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostEntry.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.util.Map;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java
similarity index 88%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java
index b2a37b7cfa903..5bd2d82237fc3 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Frame.java
@@ -5,6 +5,6 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 public record Frame(String fileName, String functionName, int functionOffset, int lineNumber, boolean inline, boolean last) {}
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java
similarity index 96%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java
index 32273d56d0176..4674a2cb0e12f 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/FrameGroupID.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.common.Strings;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java
similarity index 92%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java
index 3719722ad2d62..6866281c8dbeb 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionType;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
index c851b372cb2db..e4ea3c1521d22 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.TransportAction;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java
similarity index 92%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java
index 1fd87740d6292..6871cc9e296f2 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionType;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java
index 038a576cd77fc..be30c9662fddb 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequest.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
@@ -19,6 +19,7 @@ import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xpack.profiling.persistence.EventsIndex;
 import java.io.IOException;
 import java.util.ArrayList;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
index 4cad1104f783b..532ad374c3c4b 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.TransportAction;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java
index 44c9c987fc6c7..1b31642d07be1 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseBuilder.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseBuilder.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.time.Instant;
 import java.util.List;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java
index 59132d45995e3..0d8f3aad27daa 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java
similarity index 92%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java
index b11e74cbbf93d..5d7dc17cd348e 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionType;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java
similarity index 97%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java
index b8785bc607b18..b16ce6f43685f 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetTopNFunctionsResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.TransportAction;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
index aae6615114f43..29f3b66956d55 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContentObject;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java
similarity index 97%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java
index 7d1c5bdbf66a3..8b97f1139d6ad 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexAllocation.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/IndexAllocation.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
index d694ffd2cbebc..5628b64ea67b7 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContentObject;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java
index 3a1cad38f7781..05367cc3fbaaf 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceTypeService.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java
index 53962c1f93cee..dbc60aa47a235 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/KvIndexResolver.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java
similarity index 95%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java
index d346dd279f250..f8093091f56c5 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/NumberUtils.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/NumberUtils.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 final class NumberUtils {
     private NumberUtils() {
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java
similarity index 96%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java
index 115b165f3e791..1a6809774f7f6 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.common.inject.Inject;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java
similarity index 96%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java
index 1100c6b10c5f7..a479dca379c4a 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingLicenseChecker.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingLicenseChecker.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.license.License;
 import org.elasticsearch.license.LicenseUtils;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java
similarity index 97%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java
index 7e7b431759cd4..738a7a4e52ddb 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingUsageTransportAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/ProfilingUsageTransportAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java
similarity index 97%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java
index b70807e472536..54401ce1d3a5a 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Resampler.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/Resampler.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.util.Random;
 import java.util.random.RandomGenerator;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java
index 5f7102c63d3d7..b3b2b0b8caea5 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackFrame.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java
index d24127824dafd..2a4e5f42fe657 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StackTrace.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.xcontent.ObjectPath;
 import org.elasticsearch.xcontent.ToXContentObject;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java
similarity index 94%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java
index c423fe12f3581..6197a0d6a0c4f 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StopWatch.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/StopWatch.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 /**
  * Measures time and logs it in milliseconds.
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java
index 777d8e247335c..402d2ff012839 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TopNFunction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java
similarity index 96%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java
index adb88848a418e..f020ad9e6a905 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TraceEvent.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TraceEvent.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import java.util.HashMap;
 import java.util.Map;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java
index 7a25319d3a1cc..4f3778081563b 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java
index d7c9e61b73a3a..5467f0c10ccc8 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -48,6 +48,8 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xcontent.ObjectPath;
+import org.elasticsearch.xpack.profiling.ProfilingPlugin;
+import org.elasticsearch.xpack.profiling.persistence.EventsIndex;
 import java.time.Duration;
 import java.time.Instant;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java
similarity index 94%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java
index d918a0def7ebb..88f19a62bbedf 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -34,6 +34,12 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.profiling.ProfilingPlugin;
+import org.elasticsearch.xpack.profiling.persistence.EventsIndex;
+import org.elasticsearch.xpack.profiling.persistence.IndexStateResolver;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingDataStreamManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexManager;
+import org.elasticsearch.xpack.profiling.persistence.ProfilingIndexTemplateRegistry;
 public class TransportGetStatusAction extends TransportMasterNodeAction {
     private static final Logger log = LogManager.getLogger(TransportGetStatusAction.class);
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
index cb5f2da6c3731..05e2202c7b91c 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.action;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
index d74eaa8c5650e..528d6f28a7115 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/AbstractProfilingPersistenceManager.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java
similarity index 98%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java
index f246a34f3362d..b87f3345579aa 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/EventsIndex.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import java.util.Collection;
 import java.util.Collections;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java
similarity index 95%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java
index c34858acf5986..81262e6d33cad 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexState.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexState.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.elasticsearch.index.Index;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java
similarity index 97%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java
index a09d162c32967..b5efe66423679 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStateResolver.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -21,12 +21,12 @@ import java.util.List;
 import java.util.Map;
-class IndexStateResolver {
+public class IndexStateResolver {
     private static final Logger logger = LogManager.getLogger(IndexStateResolver.class);
     private volatile boolean checkOutdatedIndices;
-    IndexStateResolver(boolean checkOutdatedIndices) {
+    public IndexStateResolver(boolean checkOutdatedIndices) {
         this.checkOutdatedIndices = checkOutdatedIndices;
     }
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java
similarity index 92%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java
index 389c0de80cc5f..0dc3da7bc7f80 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStatus.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/IndexStatus.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 enum IndexStatus {
     CLOSED(false),
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java
index b6ccc2cee91c9..138c2301fd636 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/Migration.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java
similarity index 96%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java
index 722a7d1dbac63..331d93b066da5 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManager.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionResponse;
@@ -36,7 +36,7 @@
 /**
  * Creates all data streams that are required for using Elastic Universal Profiling.
  */
-class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager {
+public class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager {
     public static final List PROFILING_DATASTREAMS;
     static {
@@ -51,7 +51,12 @@ class ProfilingDataStreamManager extends AbstractProfilingPersistenceManager {
+public class ProfilingIndexManager extends AbstractProfilingPersistenceManager {
     // For testing
     public static final List PROFILING_INDICES = List.of(
         ProfilingIndex.regular(
@@ -68,7 +68,12 @@ class ProfilingIndexManager extends AbstractProfilingPersistenceManager
         client.admin().indices().delete(req, l));
     }
-    enum OnVersionBump {
+    public enum OnVersionBump {
         DELETE_OLD,
         KEEP_OLD
     }
@@ -257,27 +262,27 @@ enum OnVersionBump {
     /**
      * An index that is used by Universal Profiling.
      */
-    static class ProfilingIndex implements ProfilingIndexAbstraction {
+    public static class ProfilingIndex implements ProfilingIndexAbstraction {
         private final String namePrefix;
         private final int version;
         private final String generation;
         private final OnVersionBump onVersionBump;
         private final List migrations;
-        public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) {
+        static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump) {
             return regular(name, version, onVersionBump, null);
         }
-        public static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) {
+        static ProfilingIndex regular(String name, int version, OnVersionBump onVersionBump, Migration.Builder builder) {
             List migrations = builder != null ? builder.build(version) : null;
             return new ProfilingIndex(name, version, null, onVersionBump, migrations);
         }
-        public static ProfilingIndex kv(String name, int version) {
+        static ProfilingIndex kv(String name, int version) {
             return kv(name, version, null);
         }
-        public static ProfilingIndex kv(String name, int version, Migration.Builder builder) {
+        static ProfilingIndex kv(String name, int version, Migration.Builder builder) {
             List migrations = builder != null ? builder.build(version) : null;
             // K/V indices will age automatically as per the ILM policy, and we won't force-upgrade them on version bumps
             return new ProfilingIndex(name, version, "000001", OnVersionBump.KEEP_OLD, migrations);
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java
similarity index 99%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java
index e1698e71afab2..61d3010bddf77 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.persistence;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java
similarity index 90%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java
index 3b1b2e1789ad1..c6c9309077a34 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetFlamegraphAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetFlamegraphAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.rest;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestCancellableNodeClient;
 import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
+import org.elasticsearch.xpack.profiling.action.GetFlamegraphAction;
+import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest;
 import java.io.IOException;
 import java.util.List;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java
similarity index 90%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java
index ac7e9943b6566..4161f478bc2f3 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStackTracesAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.rest;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestCancellableNodeClient;
 import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
+import org.elasticsearch.xpack.profiling.action.GetStackTracesAction;
+import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest;
 import java.io.IOException;
 import java.util.List;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java
similarity index 93%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java
index 331bab40cdacc..2d5cc7a71669c 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
-package org.elasticsearch.xpack.profiling;
+package org.elasticsearch.xpack.profiling.rest;
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -13,6 +13,7 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.xpack.profiling.action.GetStatusAction;
 import java.util.List;
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java
similarity index 90%
rename from x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java
rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java
index b9896418d7b79..9c23d31964b5b 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetTopNFunctionsAction.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetTopNFunctionsAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; @@ -13,6 +13,8 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetTopNFunctionsAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java index 48cc535dbe7e4..a7b9a97b71acc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CO2CalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java index 2982df317a38c..1b41f30c3df8e 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CarthesianCombinator.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import java.lang.reflect.Array; import java.util.function.Consumer; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java index b6e743a0946dd..eaf6cf618eddb 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java index 50cfdd28a98fc..2bd6d66f82c54 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/FrameGroupIDTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java index cfaa90b8adf85..70bb1abfc40ac 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesRequestTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java index 3ebd2ef6a8aeb..973f9ce3df820 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java index 5c24e295909bc..b6b1ecef666c9 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/HostMetadataTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java index bd66645243a92..756636ef84f78 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/IndexAllocationTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/IndexAllocationTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java index d6b9438611114..5229a398b0367 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/KvIndexResolverTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java similarity index 95% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java index 0b8a410f9bb66..649759ba0309d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/NumberUtilsTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/NumberUtilsTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java index b66b8a3db50f9..d7eda19e45fbf 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingInfoTransportActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ProfilingInfoTransportActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java index 0b37dcd154ca5..c2537edab6bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/ResamplerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java index 3e1bc4eba202d..0888133759f45 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackFrameTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java index 4f583b55f18f7..ee85c4b9cb01f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/StackTraceTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java index f30fd18443550..9623415b41554 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TopNFunctionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java index e10892f0e73ce..46d8df0a91bbd 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetFlamegraphActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java similarity index 98% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java index 2eccfb45f5958..80962ac5064a5 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java index f248d8e27bd43..6e5ed79579a0f 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetTopNFunctionsActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsActionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.action; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java similarity index 97% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java index 4f943cbb62a7e..8de7c1c974785 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/EventsIndexTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java index 87b8aed1811e2..f2245baafe0c0 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingDataStreamManagerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java index ae1aa7072510d..db3037e09763d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexManagerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java similarity index 99% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java index fb1051add3f1b..81d6ed15804b6 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistryTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java similarity index 94% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java index c37404c9209df..38a0c2fdf7e10 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/persistence/VerifyingClient.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.persistence; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -19,7 +19,7 @@ /** * A client that delegates to a verifying function for action/request/listener */ -public class VerifyingClient extends NoOpClient { +class VerifyingClient extends NoOpClient { private TriFunction, ActionRequest, ActionListener, ActionResponse> verifier = (a, r, l) -> { Assert.fail("verifier not set"); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java similarity index 87% rename from x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java index 695bd3be0ef79..d5cd50e65c019 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/rest/RestGetStackTracesActionTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiling; +package org.elasticsearch.xpack.profiling.rest; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; @@ -17,6 +17,8 @@ import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.profiling.action.GetStackTracesRequest; +import org.elasticsearch.xpack.profiling.action.GetStackTracesResponse; import org.junit.Before; import java.util.Collections; @@ -76,22 +78,15 @@ public void testPrepareParameterizedRequest() { assertThat(getStackTracesRequest.getCustomCostPerCoreHour(), is(0.083d)); assertThat(getStackTracesRequest.getQuery(), notNullValue(QueryBuilder.class)); executeCalled.set(true); - - GetStackTracesResponseBuilder responseBuilder = new GetStackTracesResponseBuilder(getStackTracesRequest); - responseBuilder.setSamplingRate(0.04d); - responseBuilder.setTotalFrames(523); - responseBuilder.setTotalSamples(3L); - - GetStackTracesResponse response = responseBuilder.build(); - assertNull(response.getStackTraces()); - assertNull(response.getStackFrames()); - assertNull(response.getExecutables()); - assertNull(response.getStackTraceEvents()); - assertEquals(response.getSamplingRate(), 0.04d, 0.0001d); - assertEquals(response.getTotalFrames(), 523); - assertEquals(response.getTotalSamples(), 3L); - - return response; + return new GetStackTracesResponse( + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + 523, + 0.04d, + 3L + ); }); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath("/_profiling/stacktraces") From ec2a4ca8b355116639d86028bc001b3a5394813c Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Apr 2024 06:52:14 +0100 Subject: [PATCH 118/173] Expand release note for #105044 (#107257) Users of supposedly-S3-compatible storage may need to be aware of this change, so this commit expands the release notes to link to the relevant S3 documentation. 
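[Editorial aside on the mechanism behind #105044, for readers who want more than the release-note wording: the change attaches the OperationPurpose of each repository operation to the outgoing S3 request as a custom query-string parameter, which S3 then echoes in the Request-URI field of the bucket's server access logs. Below is a minimal sketch of that pattern against the AWS SDK for Java v1; the bucket name, object key, and the parameter name/value ("x-purpose", "SnapshotData") are illustrative placeholders, not necessarily the exact strings Elasticsearch emits.

import java.io.IOException;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;

public class PurposeTaggedRead {
    public static void main(String[] args) throws IOException {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        GetObjectRequest request = new GetObjectRequest("my-bucket", "indices/0/segments_1");
        // Placeholder parameter name/value. The extra parameter is signed along
        // with the rest of the request and appears in the Request-URI field of
        // the bucket's server access logs, making it usable as a per-request tag.
        request.putCustomQueryParameter("x-purpose", "SnapshotData");
        try (S3Object object = s3.getObject(request)) {
            System.out.println("content length: " + object.getObjectMetadata().getContentLength());
        }
    }
}

This is also why the expanded note matters for users of supposedly-S3-compatible storage: a third-party endpoint that rejects or mis-signs unknown query parameters may start failing these tagged requests.]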
--- docs/reference/release-notes/8.13.0.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 99ee4e5fb86e1..bcb533049f27d 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -389,7 +389,7 @@ Security:: Snapshot/Restore:: * Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] -* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Expose `OperationPurpose` in S3 access logs using a https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html#LogFormatCustom[custom query-string parameter] {es-pull}105044[#105044] * Fix blob cache race, decay, time dependency {es-pull}104784[#104784] * Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] * Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] From de171b8f88bc1084a00df54f4e9d4fc37a2d41c1 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 10 Apr 2024 08:35:09 +0200 Subject: [PATCH 119/173] Use merge sort instead of hashing to avoid performance issues with many buckets (#107218) --- .../histogram/InternalAutoDateHistogram.java | 162 +++++++++--------- .../org/elasticsearch/TransportVersions.java | 1 + .../bucket/composite/InternalComposite.java | 154 +++++++---------- .../histogram/InternalDateHistogram.java | 99 +++++++++-- .../bucket/histogram/InternalHistogram.java | 100 ++++++++--- .../InternalVariableWidthHistogram.java | 126 ++++++++------ .../bucket/prefix/InternalIpPrefix.java | 95 ++++++---- 7 files changed, 434 insertions(+), 303 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index ab531b69be947..68e4dcf0d2d99 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -7,13 +7,12 @@ */ package org.elasticsearch.aggregations.bucket.histogram; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersions; import org.elasticsearch.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.LongObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,6 +21,7 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -35,6 +35,7 @@ import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.ListIterator; import java.util.Map; @@ -232,6 +233,11 @@ public InternalAutoDateHistogram(StreamInput in) throws IOException { } else { bucketInnerInterval = 1; // Calculated on merge. } + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingLong(b -> b.key)); + } } @Override @@ -287,6 +293,61 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } + /** + * This method works almost exactly the same as + * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different + * here is that we need to round all the keys we see using the highest level + * rounding returned across all the shards so the resolution of the buckets + * is the same and they can be reduced together. + */ + private BucketReduceResult reduceBuckets( + PriorityQueue> pq, + int reduceRoundingIdx, + long min, + long max, + AggregationReduceContext reduceContext + ) { + // First we need to find the highest level rounding used across all the + // shards + Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max); + + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + long key = reduceRounding.round(pq.top().current().key); + + do { + final IteratorAndCurrent top = pq.top(); + + if (reduceRounding.round(top.current().key) != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = reduceRounding.round(top.current().key); + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key > key : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reducedBuckets.add(reduced); + } + } + + return mergeBucketsIfNeeded(new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), reduceContext); + } + private BucketReduceResult mergeBucketsIfNeeded(BucketReduceResult firstPassResult, AggregationReduceContext reduceContext) { int idx = firstPassResult.roundingIdx; RoundingInfo info = bucketInfo.roundingInfos[idx]; @@ -338,13 +399,12 @@ private List mergeBuckets( private Bucket reduceBucket(List buckets, AggregationReduceContext context) { assert buckets.isEmpty() == false; - long docCount = 0; - for (Bucket bucket : buckets) { - docCount += bucket.docCount; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - return new 
InternalAutoDateHistogram.Bucket(buckets.get(0).key, docCount, format, aggs); } private record BucketReduceResult( @@ -434,87 +494,33 @@ static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, Rou return currentRoundingIdx - 1; } - /** - * This method works almost exactly the same as - * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different - * here is that we need to round all the keys we see using the highest level - * rounding returned across all the shards so the resolution of the buckets - * is the same and they can be reduced together. - */ @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); - int reduceRoundingIdx = 0; - long min = Long.MAX_VALUE; - long max = Long.MIN_VALUE; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; + } + }; + private int reduceRoundingIdx = 0; + private long min = Long.MAX_VALUE; + private long max = Long.MIN_VALUE; @Override public void accept(InternalAggregation aggregation) { - final InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; + InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; reduceRoundingIdx = Math.max(histogram.bucketInfo.roundingIdx, reduceRoundingIdx); - if (false == histogram.buckets.isEmpty()) { + if (histogram.buckets.isEmpty() == false) { min = Math.min(min, histogram.buckets.get(0).key); max = Math.max(max, histogram.buckets.get(histogram.buckets.size() - 1).key); - for (Bucket bucket : histogram.buckets) { - BucketReducer reducer = bucketsReducer.get(bucket.key); - if (reducer == null) { - reducer = new BucketReducer<>(bucket, reduceContext, size); - bucketsReducer.put(bucket.key, reducer); - } - reducer.accept(bucket); - } + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - // First we need to find the highest level rounding used across all the - // shards - final Rounding.Prepared reduceRounding = prepare(reduceRoundingIdx, min, max); - - final long[] keys = new long[(int) bucketsReducer.size()]; - { - // fill the array and sort it - final int[] index = new int[] { 0 }; - bucketsReducer.forEach(c -> keys[index[0]++] = c.key); - Arrays.sort(keys); - } - - final List reducedBuckets = new ArrayList<>(); - if (keys.length > 0) { - // list of buckets coming from different shards that have the same key - BucketReducer currentReducer = null; - long key = reduceRounding.round(keys[0]); - for (long top : keys) { - if (reduceRounding.round(top) != key) { - assert currentReducer != null; - // the key changes, reduce what we already buffered and reset the buffer for current buckets - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); - currentReducer = null; - key = reduceRounding.round(top); - } - - final BucketReducer nextReducer = bucketsReducer.get(top); - if (currentReducer == null) { - currentReducer = nextReducer; - } else { - currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.getAggregations())); - } - } - - if (currentReducer != null) { - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), 
currentReducer.getAggregations())); - } - } - - BucketReduceResult reducedBucketsResult = mergeBucketsIfNeeded( - new BucketReduceResult(reducedBuckets, reduceRoundingIdx, 1, reduceRounding, min, max), - reduceContext - ); + BucketReduceResult reducedBucketsResult = reduceBuckets(pq, reduceRoundingIdx, min, max, reduceContext); if (reduceContext.isFinalReduce()) { // adding empty buckets if needed @@ -543,12 +549,6 @@ public InternalAggregation get() { reducedBucketsResult.innerInterval ); } - - @Override - public void close() { - bucketsReducer.forEach(c -> Releasables.close(c.value)); - Releasables.close(bucketsReducer); - } }; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index e05487c9c88fe..8589e183a150e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -165,6 +165,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0); public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); + public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 22c967bb2ea14..1263532117ac0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -9,12 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ObjectArrayPriorityQueue; -import org.elasticsearch.common.util.ObjectObjectPagedHashMap; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -22,7 +19,8 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.DelayedBucketReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -204,36 +202,63 @@ int[] getReverseMuls() { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final BucketsQueue queue = new BucketsQueue(reduceContext); - boolean earlyTerminated = false; + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return 
a.current().compareKey(b.current()) < 0; + } + }; + private boolean earlyTerminated = false; @Override public void accept(InternalAggregation aggregation) { - InternalComposite sortedAgg = (InternalComposite) aggregation; + final InternalComposite sortedAgg = (InternalComposite) aggregation; earlyTerminated |= sortedAgg.earlyTerminated; - for (InternalBucket bucket : sortedAgg.getBuckets()) { - if (queue.add(bucket) == false) { - // if the bucket is not competitive, we can break - // because incoming buckets are sorted - break; - } + if (sortedAgg.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(sortedAgg.buckets.iterator())); } } @Override public InternalAggregation get() { - final List result = queue.get(); + InternalBucket lastBucket = null; + final List buckets = new ArrayList<>(); + final List result = new ArrayList<>(); + while (pq.size() > 0) { + IteratorAndCurrent top = pq.top(); + if (lastBucket != null && top.current().compareKey(lastBucket) != 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + buckets.clear(); + result.add(reduceBucket); + if (result.size() >= getSize()) { + break; + } + } + lastBucket = top.current(); + buckets.add(top.current()); + if (top.hasNext()) { + top.next(); + pq.updateTop(); + } else { + pq.pop(); + } + } + if (buckets.size() > 0) { + InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); + result.add(reduceBucket); + } + List reducedFormats = formats; CompositeKey lastKey = null; - if (result.isEmpty() == false) { - InternalBucket lastBucket = result.get(result.size() - 1); + if (result.size() > 0) { + lastBucket = result.get(result.size() - 1); /* Attach the formats from the last bucket to the reduced composite * so that we can properly format the after key. */ reducedFormats = lastBucket.formats; lastKey = lastBucket.getRawKey(); } reduceContext.consumeBucketsAndMaybeBreak(result.size()); - InternalComposite reduced = new InternalComposite( + final InternalComposite reduced = new InternalComposite( name, getSize(), sourceNames, @@ -248,85 +273,9 @@ public InternalAggregation get() { reduced.validateAfterKey(); return reduced; } - - @Override - public void close() { - Releasables.close(queue); - } }; } - private class BucketsQueue implements Releasable { - private final ObjectObjectPagedHashMap> bucketReducers; - private final ObjectArrayPriorityQueue queue; - private final AggregationReduceContext reduceContext; - - private BucketsQueue(AggregationReduceContext reduceContext) { - this.reduceContext = reduceContext; - bucketReducers = new ObjectObjectPagedHashMap<>(getSize(), reduceContext.bigArrays()); - queue = new ObjectArrayPriorityQueue<>(getSize(), reduceContext.bigArrays()) { - @Override - protected boolean lessThan(InternalBucket a, InternalBucket b) { - return b.compareKey(a) < 0; - } - }; - } - - /** adds a bucket to the queue. 
Return false if the bucket is not competitive, otherwise true.*/ - boolean add(InternalBucket bucket) { - DelayedBucketReducer delayed = bucketReducers.get(bucket.key); - if (delayed == null) { - final InternalBucket out = queue.insertWithOverflow(bucket); - if (out == null) { - // bucket is added - delayed = new DelayedBucketReducer<>(bucket, reduceContext); - } else if (out == bucket) { - // bucket is not competitive - return false; - } else { - // bucket replaces existing bucket - delayed = bucketReducers.remove(out.key); - assert delayed != null; - delayed.reset(bucket); - } - bucketReducers.put(bucket.key, delayed); - } - delayed.accept(bucket); - return true; - } - - /** Return the list of reduced buckets */ - List get() { - final int bucketsSize = (int) bucketReducers.size(); - final InternalBucket[] result = new InternalBucket[bucketsSize]; - for (int i = bucketsSize - 1; i >= 0; i--) { - final InternalBucket bucket = queue.pop(); - assert bucket != null; - /* Use the formats from the bucket because they'll be right to format - * the key. The formats on the InternalComposite doing the reducing are - * just whatever formats make sense for *its* index. This can be real - * trouble when the index doing the reducing is unmapped. */ - final var reducedFormats = bucket.formats; - final DelayedBucketReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); - result[i] = new InternalBucket( - sourceNames, - reducedFormats, - bucket.key, - reverseMuls, - missingOrders, - reducer.getDocCount(), - reducer.getAggregations() - ); - } - return List.of(result); - } - - @Override - public void close() { - Releasables.close(bucketReducers, queue); - } - } - @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalComposite( @@ -343,6 +292,23 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } + private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (InternalBucket bucket : buckets) { + reducer.accept(bucket); + } + /* Use the formats from the bucket because they'll be right to format + * the key. The formats on the InternalComposite doing the reducing are + * just whatever formats make sense for *its* index. This can be real + * trouble when the index doing the reducing is unmapped. 
*/ + final var reducedFormats = reducer.getProto().formats; + final long docCount = reducer.getDocCount(); + final InternalAggregations aggs = reducer.getAggregations(); + return new InternalBucket(sourceNames, reducedFormats, reducer.getProto().key, reverseMuls, missingOrders, docCount, aggs); + } + } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index ce42145f2ceb1..4939c3bc88744 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -8,12 +8,12 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -23,6 +23,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -262,6 +264,11 @@ public InternalDateHistogram(StreamInput in) throws IOException { downsampledResultsOffset = false; } buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingLong(b -> b.key)); + } } @Override @@ -323,6 +330,71 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (top.current().key != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + if 
(consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + reducedBuckets.add(reduced); + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key > key : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + /** + * Reduce a list of same-keyed buckets (from multiple shards) to a single bucket. This + * requires all buckets to have the same key. + */ + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private void addEmptyBuckets(List list, AggregationReduceContext reduceContext) { /* * Make sure we have space for the empty buckets we're going to add by @@ -433,31 +505,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, L @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + private final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalDateHistogram.this.createBucket(key, docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key < b.current().key; } }; @Override public void accept(InternalAggregation aggregation) { - InternalDateHistogram dateHistogram = (InternalDateHistogram) aggregation; - for (Bucket bucket : dateHistogram.buckets) { - reducer.accept(bucket.key, bucket); + final InternalDateHistogram histogram = (InternalDateHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingLong(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -486,11 +552,6 @@ public InternalAggregation get() { getMetadata() ); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 980f11ab0aa61..4ff01c5648486 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -8,10 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,6 +21,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -177,6 +179,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(minBound); out.writeDouble(maxBound); subAggregations.writeTo(out); + } @Override @@ -240,6 +243,11 @@ public InternalHistogram(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingDouble(b -> b.key)); + } } @Override @@ -282,6 +290,69 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } + private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + int consumeBucketCount = 0; + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + + if (Double.compare(top.current().key, key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + // Using Double.compare instead of != to handle NaN correctly. 
+ final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + currentBuckets.clear(); + key = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert Double.compare(top.current().key, key) > 0 : "shards must return data sorted by key"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reducedBuckets.add(reduced); + if (consumeBucketCount++ >= REPORT_EMPTY_EVERY) { + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + consumeBucketCount = 0; + } + } + } + } + + reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); + return reducedBuckets; + } + + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto().key, reducer.getDocCount(), reducer.getAggregations()); + } + } + private double nextKey(double key) { return round(key + emptyBucketInfo.interval + emptyBucketInfo.interval / 2); } @@ -376,31 +447,25 @@ private void iterateEmptyBuckets(List list, ListIterator iter, D @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongKeyedMultiBucketsAggregatorReducer reducer = new LongKeyedMultiBucketsAggregatorReducer<>( - reduceContext, - size, - minDocCount - ) { + final PriorityQueue> pq = new PriorityQueue<>(size) { @Override - protected Bucket createBucket(long key, long docCount, InternalAggregations aggregations) { - return InternalHistogram.this.createBucket(NumericUtils.sortableLongToDouble(key), docCount, aggregations); + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return Double.compare(a.current().key, b.current().key) < 0; } }; @Override public void accept(InternalAggregation aggregation) { - InternalHistogram histogram = (InternalHistogram) aggregation; - for (Bucket bucket : histogram.buckets) { - reducer.accept(NumericUtils.doubleToSortableLong(bucket.key), bucket); + final InternalHistogram histogram = (InternalHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - List reducedBuckets = reducer.get(); + List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { - reducedBuckets.sort(Comparator.comparingDouble(b -> b.key)); if (minDocCount == 0) { addEmptyBuckets(reducedBuckets, reduceContext); } @@ -418,11 +483,6 @@ public InternalAggregation get() { } return new InternalHistogram(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } - - @Override - public void close() { - Releasables.close(reducer); - } }; } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 27a79095eb49d..05944b75d06d5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -8,11 +8,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.LongObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,6 +20,7 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -260,6 +260,11 @@ public InternalVariableWidthHistogram(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); buckets = in.readCollectionAsList(stream -> new Bucket(stream, format)); targetNumBuckets = in.readVInt(); + // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort + if (in.getTransportVersion().between(TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { + // list is mutable by #readCollectionAsList contract + buckets.sort(Comparator.comparingDouble(b -> b.centroid)); + } } @Override @@ -309,21 +314,62 @@ public Number getKey(MultiBucketsAggregation.Bucket bucket) { } private Bucket reduceBucket(List buckets, AggregationReduceContext context) { - long docCount = 0; + assert buckets.isEmpty() == false; double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; double sum = 0; - for (InternalVariableWidthHistogram.Bucket bucket : buckets) { - docCount += bucket.docCount; - min = Math.min(min, bucket.bounds.min); - max = Math.max(max, bucket.bounds.max); - sum += bucket.docCount * bucket.centroid; - } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - final double centroid = sum / docCount; - final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); - return new Bucket(centroid, bounds, docCount, format, aggs); + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + min = Math.min(min, bucket.bounds.min); + max = Math.max(max, bucket.bounds.max); + sum += bucket.docCount * bucket.centroid; + reducer.accept(bucket); + } + final double centroid = sum / reducer.getDocCount(); + final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); + return 
new Bucket(centroid, bounds, reducer.getDocCount(), format, reducer.getAggregations()); + } + } + + public List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + double key = pq.top().current().centroid(); + // list of buckets coming from different shards that have the same key + final List currentBuckets = new ArrayList<>(); + do { + IteratorAndCurrent top = pq.top(); + + if (Double.compare(top.current().centroid(), key) != 0) { + // The key changes, reduce what we already buffered and reset the buffer for current buckets. + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = top.current().centroid(); + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + Bucket prev = top.current(); + top.next(); + assert top.current().compareKey(prev) >= 0 : "shards must return data sorted by centroid"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + } + } + + mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); + return reducedBuckets; } static class BucketRange { @@ -479,42 +525,24 @@ private static void adjustBoundsForOverlappingBuckets(List buckets) { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - - final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return Double.compare(a.current().centroid, b.current().centroid) < 0; + } + }; @Override public void accept(InternalAggregation aggregation) { - InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation; - for (Bucket bucket : histogram.getBuckets()) { - long key = NumericUtils.doubleToSortableLong(bucket.centroid()); - ReducerAndExtraInfo reducer = bucketsReducer.get(key); - if (reducer == null) { - reducer = new ReducerAndExtraInfo(new BucketReducer<>(bucket, reduceContext, size)); - bucketsReducer.put(key, reducer); - reduceContext.consumeBucketsAndMaybeBreak(1); - } - reducer.min[0] = Math.min(reducer.min[0], bucket.bounds.min); - reducer.max[0] = Math.max(reducer.max[0], bucket.bounds.max); - reducer.sum[0] += bucket.docCount * bucket.centroid; - reducer.reducer.accept(bucket); + final InternalVariableWidthHistogram histogram = (InternalVariableWidthHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(histogram.buckets.iterator())); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.forEach(entry -> { - final double centroid = entry.value.sum[0] / entry.value.reducer.getDocCount(); - final Bucket.BucketBounds bounds = new Bucket.BucketBounds(entry.value.min[0], entry.value.max[0]); - reducedBuckets.add( - new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.getAggregations()) - ); - }); - reducedBuckets.sort(Comparator.comparing(Bucket::centroid)); - 
mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); + final List reducedBuckets = reduceBuckets(pq, reduceContext); if (reduceContext.isFinalReduce()) { buckets.sort(Comparator.comparing(Bucket::min)); mergeBucketsWithSameMin(reducedBuckets, reduceContext); @@ -522,21 +550,9 @@ public InternalAggregation get() { } return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, format, metadata); } - - @Override - public void close() { - bucketsReducer.forEach(entry -> Releasables.close(entry.value.reducer)); - Releasables.close(bucketsReducer); - } }; } - private record ReducerAndExtraInfo(BucketReducer reducer, double[] min, double[] max, double[] sum) { - private ReducerAndExtraInfo(BucketReducer reducer) { - this(reducer, new double[] { Double.POSITIVE_INFINITY }, new double[] { Double.NEGATIVE_INFINITY }, new double[] { 0 }); - } - } - @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalVariableWidthHistogram( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index 3557947bb9ea7..48b11524df792 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -9,10 +9,9 @@ package org.elasticsearch.search.aggregations.bucket.prefix; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ObjectObjectPagedHashMap; -import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -21,12 +20,12 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -225,51 +224,69 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( - getBuckets().size(), - reduceContext.bigArrays() - ); + private final PriorityQueue> pq = new PriorityQueue<>(size) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current().key.compareTo(b.current().key) < 0; + } + }; @Override public void accept(InternalAggregation aggregation) { final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation; - for (Bucket bucket : ipPrefix.getBuckets()) { - BucketReducer bucketReducer = buckets.get(bucket.key); - if (bucketReducer == null) { - bucketReducer = new BucketReducer<>(bucket, reduceContext, size); - boolean success = false; - try { - 
buckets.put(bucket.key, bucketReducer); - success = true; - } finally { - if (success == false) { - Releasables.close(bucketReducer); - } - } - } - bucketReducer.accept(bucket); + if (ipPrefix.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent<>(ipPrefix.buckets.iterator())); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size())); - buckets.forEach(entry -> { - if (false == reduceContext.isFinalReduce() || entry.value.getDocCount() >= minDocCount) { - reducedBuckets.add(createBucket(entry.value.getProto(), entry.value.getAggregations(), entry.value.getDocCount())); - } - }); + final List reducedBuckets = reduceBuckets(pq, reduceContext); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); - reducedBuckets.sort(Comparator.comparing(a -> a.key)); return new InternalIpPrefix(getName(), format, keyed, minDocCount, reducedBuckets, metadata); } + }; + } - @Override - public void close() { - buckets.forEach(entry -> Releasables.close(entry.value)); - Releasables.close(buckets) + private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same value + List currentBuckets = new ArrayList<>(); + BytesRef value = pq.top().current().key; + + do { + final IteratorAndCurrent top = pq.top(); + if (top.current().key.equals(value) == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) { + reducedBuckets.add(reduced); + } + currentBuckets.clear(); + value = top.current().key; + } + + currentBuckets.add(top.current()); + + if (top.hasNext()) { + top.next(); + assert top.current().key.compareTo(value) > 0 + : "shards must return data sorted by value [" + top.current().key + "] and [" + value + "]"; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = reduceBucket(currentBuckets, reduceContext); + if (false == reduceContext.isFinalReduce() || reduced.getDocCount() >= minDocCount) { + reducedBuckets.add(reduced); + } } - }; + } + + return reducedBuckets; } @Override @@ -322,6 +339,16 @@ private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, ); } + private Bucket reduceBucket(List buckets, AggregationReduceContext context) { + assert buckets.isEmpty() == false; + try (BucketReducer reducer = new BucketReducer<>(buckets.get(0), context, buckets.size())) { + for (Bucket bucket : buckets) { + reducer.accept(bucket); + } + return createBucket(reducer.getProto(), reducer.getAggregations(), reducer.getDocCount()); + } + } + @Override public List getBuckets() { return Collections.unmodifiableList(buckets); From 6507ba572dacb5bcf1ff973418dd51b72a1d0d9a Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 10 Apr 2024 09:04:10 +0200 Subject: [PATCH 120/173] [Profiling] Limit TopN functions to available data (#107296) With this commit we consider a case in the TopN functions API where the specified limit is larger than the available number of TopN functions. Currently this throws an error (`IndexOutOfBoundsException`). With this check in place we just return the list as is.
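A minimal, self-contained sketch of the clamping check (hypothetical class and method names; in the patch itself the condition lives inline in TopNFunctionsBuilder#build):

    import java.util.List;

    final class TopNLimitSketch {
        // Apply the limit only when it is set, positive, and smaller than the number of
        // available functions; otherwise subList(0, limit) with limit > functions.size()
        // would throw an IndexOutOfBoundsException.
        static <T> List<T> applyLimit(List<T> functions, Integer limit) {
            if (limit != null && limit > 0 && limit < functions.size()) {
                return functions.subList(0, limit);
            }
            return functions; // fewer functions than the limit: return the list as is
        }
    }

For example, applyLimit(List.of("foo", "bar"), 5) returns both functions instead of throwing, which is the behavior verified by the new testBuildFunctionsWithLimitHigherThanAvailableFunctionCount test.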
--- .../TransportGetTopNFunctionsAction.java | 4 +- .../action/TopNFunctionsBuilderTests.java | 87 +++++++++++++++++++ 2 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java index 05e2202c7b91c..e5d67c0b005e2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java @@ -120,7 +120,7 @@ static GetTopNFunctionsResponse buildTopNFunctions(GetStackTracesResponse respon return builder.build(); } - private static class TopNFunctionsBuilder { + static class TopNFunctionsBuilder { private final Integer limit; private final HashMap topNFunctions; @@ -141,7 +141,7 @@ public GetTopNFunctionsResponse build() { sumTotalCount += topNFunction.getTotalCount(); } // limit at the end so global stats are independent of the limit - if (limit != null && limit > 0) { + if (limit != null && limit > 0 && limit < functions.size()) { functions = functions.subList(0, limit); } return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java new file mode 100644 index 0000000000000..26c0f066dd092 --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiling.action; + +import org.elasticsearch.test.ESTestCase; + +public class TopNFunctionsBuilderTests extends ESTestCase { + public void testBuildFunctions() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(null); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + public void testBuildFunctionsWithLimitSmallerThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(1); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + // total counts are independent of the limit + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + assertEquals(1, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + } + + public void testBuildFunctionsWithLimitHigherThanAvailableFunctionCount() { + TransportGetTopNFunctionsAction.TopNFunctionsBuilder builder = new TransportGetTopNFunctionsAction.TopNFunctionsBuilder(5); + TopNFunction foo = foo(); + TopNFunction bar = bar(); + builder.addTopNFunction(foo); + builder.addTopNFunction(bar); + + GetTopNFunctionsResponse response = builder.build(); + + assertEquals(7L, response.getSelfCount()); + assertEquals(14L, response.getTotalCount()); + // still limited to the available two functions + assertEquals(2, response.getTopN().size()); + assertEquals(foo, response.getTopN().get(0)); + assertEquals(bar, response.getTopN().get(1)); + } + + private TopNFunction foo() { + TopNFunction foo = function("foo"); + foo.addSelfCount(5L); + foo.addTotalCount(10L); + foo.addSelfAnnualCO2Tons(1.0d); + foo.addTotalAnnualCO2Tons(2.0d); + foo.addSelfAnnualCostsUSD(32.2d); + foo.addTotalAnnualCostsUSD(64.4d); + return foo; + } + + private TopNFunction bar() { + TopNFunction bar = function("bar"); + bar.addSelfCount(2L); + bar.addTotalCount(4L); + bar.addSelfAnnualCO2Tons(0.5d); + bar.addTotalAnnualCO2Tons(1.0d); + bar.addSelfAnnualCostsUSD(16.0d); + bar.addTotalAnnualCostsUSD(32.0d); + return bar; + } + + private TopNFunction function(String name) { + return new TopNFunction(name, 3, false, 0, name, "main.c", 1, "demo"); + } +} From e21f2e30fb9c92b8df6c951b8b4afeb6cd4581bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 10 Apr 2024 10:17:59 +0200 Subject: [PATCH 121/173] [Transform] Make force-stopping the transform always remove persistent task from cluster state (#106989) --- docs/changelog/106989.yaml | 7 ++ .../transform/integration/TransformIT.java | 27 +++--- .../integration/TransformRestTestCase.java | 3 - .../integration/TransformRobustnessIT.java | 51 ++++++++++- .../TransformTaskFailedStateIT.java | 9 ++ .../action/TransportStopTransformAction.java | 91 +++++++++---------- .../transforms/TransformNodeAssignments.java | 14 +++ .../TransformNodeAssignmentsTests.java | 52 +++++++++-- 8 files changed, 177 insertions(+), 77 deletions(-) create mode 100644 docs/changelog/106989.yaml diff --git 
a/docs/changelog/106989.yaml b/docs/changelog/106989.yaml new file mode 100644 index 0000000000000..47df5fe5b47d7 --- /dev/null +++ b/docs/changelog/106989.yaml @@ -0,0 +1,7 @@ +pr: 106989 +summary: Make force-stopping the transform always remove persistent task from cluster + state +area: Transform +type: bug +issues: + - 106811 diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index e7d54028caa20..4db0d0d8baaf1 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -241,38 +241,39 @@ public void testTransformLifecycleInALoop() throws Exception { long sleepAfterStartMillis = randomLongBetween(0, 5_000); boolean force = randomBoolean(); try { - // Create the continuous transform + // Create the continuous transform. putTransform(transformId, config, RequestOptions.DEFAULT); assertThat(getTransformTasks(), is(empty())); assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId, RequestOptions.DEFAULT); - // There is 1 transform task after start + // There is 1 transform task after start. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); Thread.sleep(sleepAfterStartMillis); - // There should still be 1 transform task as the transform is continuous + // There should still be 1 transform task as the transform is continuous. assertThat(getTransformTasks(), hasSize(1)); assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); - // Stop the transform with force set randomly + // Stop the transform with force set randomly. stopTransform(transformId, true, null, false, force); - // After the transform is stopped, there should be no transform task left - assertThat(getTransformTasks(), is(empty())); + if (force) { + // If the "force" has been used, then the persistent task is removed from the cluster state but the local task can still + // be seen by the PersistentTasksNodeService. We need to wait until PersistentTasksNodeService reconciles the state. + assertBusy(() -> assertThat(getTransformTasks(), is(empty()))); + } else { + // If the "force" hasn't been used then we can expect the local task to be already gone. + assertThat(getTransformTasks(), is(empty())); + } + // After the transform is stopped, there should be no transform task left in the cluster state. 
assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { throw new AssertionError( - format( - "Failure at iteration %d (sleepAfterStartMillis=%s,force=%s): %s", - i, - sleepAfterStartMillis, - force, - e.getMessage() - ), + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), e ); } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index eb1a1258d5a96..4cc9a31c8eff5 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -65,9 +65,6 @@ public abstract class TransformRestTestCase extends TransformCommonRestTestCase { - protected static final String AUTH_KEY = "Authorization"; - protected static final String SECONDARY_AUTH_KEY = "es-secondary-authorization"; - private final Set createdTransformIds = new HashSet<>(); protected void cleanUp() throws Exception { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 0f807fbae45d1..4b7c42968f557 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; @@ -19,6 +18,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -86,10 +86,10 @@ public void testTaskRemovalAfterInternalIndexGotDeleted() throws Exception { deleteTransform(transformId); } - public void testCreateAndDeleteTransformInALoop() throws IOException { + public void testBatchTransformLifecycleInALoop() throws IOException { createReviewsIndex(); - String transformId = "test_create_and_delete_in_a_loop"; + String transformId = "test_batch_lifecycle_in_a_loop"; String destIndex = transformId + "-dest"; for (int i = 0; i < 100; ++i) { try { @@ -108,7 +108,48 @@ public void testCreateAndDeleteTransformInALoop() throws IOException { // Delete the transform deleteTransform(transformId); } catch (AssertionError | Exception e) { - fail("Failure at iteration " + i + ": " + e.getMessage()); + throw new AssertionError(format("Failure at iteration %d: %s", i, e.getMessage()), e); + } }
+ } + + public void testContinuousTransformLifecycleInALoop() throws Exception { + createReviewsIndex(); + + String transformId = "test_cont_lifecycle_in_a_loop"; + String destIndex = transformId + "-dest"; + for (int i = 0; i < 100; ++i) { + long sleepAfterStartMillis = randomLongBetween(0, 5_000); + boolean force = randomBoolean(); + try { + // Create the continuous transform. + createContinuousPivotReviewsTransform(transformId, destIndex, null); + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + startTransform(transformId); + // There is 1 transform task after start. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + Thread.sleep(sleepAfterStartMillis); + // There should still be 1 transform task as the transform is continuous. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + // Stop the transform with force set randomly. + stopTransform(transformId, force); + // After the transform is stopped, there should be no transform task left. + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + // Delete the transform. + deleteTransform(transformId); + } catch (AssertionError | Exception e) { + throw new AssertionError( + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), + e + ); } } } @@ -168,7 +209,7 @@ private void beEvilAndDeleteTheTransformIndex() throws IOException { } private static String createConfig(String sourceIndex, String destIndex) { - return Strings.format(""" + return format(""" { "source": { "index": "%s" diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index bccd97f22b4a1..5ab65ca023506 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -65,6 +65,7 @@ public void testForceStopFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -78,6 +79,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot stop a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> stopTransform(transformId, false)); @@ -99,6 +101,7 @@ public void testForceStopFailedTransform() throws Exception { assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); 
} public void testForceResetFailedTransform() throws Exception { @@ -109,6 +112,7 @@ public void testForceResetFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -122,6 +126,7 @@ public void testForceResetFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); // verify that we cannot reset a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> resetTransform(transformId, false)); @@ -135,6 +140,7 @@ public void testForceResetFailedTransform() throws Exception { resetTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } public void testStartFailedTransform() throws Exception { @@ -145,6 +151,7 @@ public void testStartFailedTransform() throws Exception { createContinuousPivotReviewsTransform(transformId, transformIndex, null); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); @@ -158,6 +165,7 @@ public void testStartFailedTransform() throws Exception { assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); var expectedFailure = "Unable to start transform [test-force-start-failed-transform] " + "as it is in a failed state. Use force stop and then restart the transform once error is resolved. 
More details: [" @@ -172,6 +180,7 @@ public void testStartFailedTransform() throws Exception { stopTransform(transformId, true); assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); } private void awaitState(String transformId, TransformStats.State state) throws Exception { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index b8ea1fee6e886..1996012ccdf58 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -164,18 +164,23 @@ protected void doExecute(Task task, Request request, ActionListener li state ); - final ActionListener doExecuteListener; - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); - if (transformNodeAssignments.getExecutorNodes().size() > 0) { + if (request.isForce()) { + // When force==true, we **do not** fan out to individual tasks (i.e. taskOperation method will not be called) as we + // want to make sure that the persistent tasks will be removed from cluster state even if these tasks are no longer + // visible by the PersistentTasksService. + cancelTransformTasksListener(transformNodeAssignments.getAssigned(), doExecuteListener).onResponse( + new Response(true) + ); + } else if (transformNodeAssignments.getExecutorNodes().isEmpty()) { + doExecuteListener.onResponse(new Response(true)); + } else { request.setNodes(transformNodeAssignments.getExecutorNodes().toArray(new String[0])); super.doExecute(task, request, doExecuteListener); - } else { - doExecuteListener.onResponse(new Response(true)); } }, e -> { if (e instanceof ResourceNotFoundException) { @@ -189,13 +194,10 @@ protected void doExecute(Task task, Request request, ActionListener li listener.onFailure(e); // found transforms without a config } else if (request.isForce()) { - final ActionListener doExecuteListener; - - if (transformNodeAssignments.getWaitingForAssignment().size() > 0) { - doExecuteListener = cancelTransformTasksWithNoAssignment(finalListener, transformNodeAssignments); - } else { - doExecuteListener = finalListener; - } + final ActionListener doExecuteListener = cancelTransformTasksListener( + transformNodeAssignments.getWaitingForAssignment(), + finalListener + ); if (transformNodeAssignments.getExecutorNodes().size() > 0) { request.setExpandedIds(transformNodeAssignments.getAssigned()); @@ -235,7 +237,6 @@ protected void taskOperation( TransformTask transformTask, ActionListener listener ) { - Set ids = request.getExpandedIds(); if (ids == null) { listener.onFailure(new IllegalStateException("Request does not have expandedIds set")); @@ -243,20 +244,6 @@ protected void taskOperation( } if (ids.contains(transformTask.getTransformId())) { - if (request.isForce()) { - // If force==true, we skip the additional step (setShouldStopAtCheckpoint) and move directly to shutting down the task. 
- // This way we ensure that the persistent task is removed ASAP (as opposed to being removed in one of the listeners). - try { - // Here the task is deregistered in scheduler and marked as completed in persistent task service. - transformTask.shutdown(); - // Here the indexer is aborted so that its thread finishes work ASAP. - transformTask.onCancelled(); - listener.onResponse(new Response(true)); - } catch (ElasticsearchException ex) { - listener.onFailure(ex); - } - return; - } // move the call to the generic thread pool, so we do not block the network thread threadPool.generic().execute(() -> { transformTask.setShouldStopAtCheckpoint(request.isWaitForCheckpoint(), ActionListener.wrap(r -> { @@ -306,7 +293,6 @@ protected StopTransformAction.Response newResponse( } private ActionListener waitForStopListener(Request request, ActionListener listener) { - ActionListener onStopListener = ActionListener.wrap( waitResponse -> transformConfigManager.refresh(ActionListener.wrap(r -> listener.onResponse(waitResponse), e -> { if ((ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) == false) { @@ -393,6 +379,7 @@ private void waitForTransformStopped( ) { // This map is accessed in the predicate and the listener callbacks final Map exceptions = new ConcurrentHashMap<>(); + persistentTasksService.waitForPersistentTasksCondition(persistentTasksCustomMetadata -> { if (persistentTasksCustomMetadata == null) { return true; @@ -501,34 +488,38 @@ private void waitForTransformStopped( })); } - private ActionListener cancelTransformTasksWithNoAssignment( - final ActionListener finalListener, - final TransformNodeAssignments transformNodeAssignments + /** + * Creates and returns the listener that sends remove request for every task in the given set. + * + * @param transformTasks set of transform tasks that should be removed + * @param finalListener listener that should be called once all the given tasks are removed + * @return listener that removes given tasks in parallel + */ + private ActionListener cancelTransformTasksListener( + final Set transformTasks, + final ActionListener finalListener ) { - final ActionListener doExecuteListener = ActionListener.wrap(response -> { + if (transformTasks.isEmpty()) { + return finalListener; + } + return ActionListener.wrap(response -> { GroupedActionListener> groupedListener = new GroupedActionListener<>( - transformNodeAssignments.getWaitingForAssignment().size(), - ActionListener.wrap(r -> { - finalListener.onResponse(response); - }, finalListener::onFailure) + transformTasks.size(), + ActionListener.wrap(r -> finalListener.onResponse(response), finalListener::onFailure) ); - for (String unassignedTaskId : transformNodeAssignments.getWaitingForAssignment()) { - persistentTasksService.sendRemoveRequest(unassignedTaskId, null, groupedListener); + for (String taskId : transformTasks) { + persistentTasksService.sendRemoveRequest(taskId, null, groupedListener); } - }, e -> { GroupedActionListener> groupedListener = new GroupedActionListener<>( - transformNodeAssignments.getWaitingForAssignment().size(), - ActionListener.wrap(r -> { - finalListener.onFailure(e); - }, finalListener::onFailure) + transformTasks.size(), + ActionListener.wrap(r -> finalListener.onFailure(e), finalListener::onFailure) ); - for (String unassignedTaskId : transformNodeAssignments.getWaitingForAssignment()) { - persistentTasksService.sendRemoveRequest(unassignedTaskId, null, groupedListener); + for (String taskId : transformTasks) { + 
persistentTasksService.sendRemoveRequest(taskId, null, groupedListener); } }); - return doExecuteListener; } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java index 7b61f0c9e8335..46f893a90aba1 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignments.java @@ -65,4 +65,18 @@ public Set getWaitingForAssignment() { public Set getStopped() { return stopped; } + + @Override + public String toString() { + return new StringBuilder("TransformNodeAssignments[").append("executorNodes=") + .append(executorNodes) + .append(",assigned=") + .append(assigned) + .append(",waitingForAssignment=") + .append(waitingForAssignment) + .append(",stopped=") + .append(stopped) + .append("]") + .toString(); + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java index f5c0b6046fbfe..2643d1bba652d 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformNodeAssignmentsTests.java @@ -9,8 +9,6 @@ import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; -import java.util.HashSet; import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -19,10 +17,11 @@ public class TransformNodeAssignmentsTests extends ESTestCase { public void testConstructorAndGetters() { - Set executorNodes = new HashSet<>(Arrays.asList("executor-1", "executor-2")); - Set assigned = new HashSet<>(Arrays.asList("assigned-1", "assigned-2")); - Set waitingForAssignment = new HashSet<>(Arrays.asList("waiting-1", "waitingv-2")); - Set stopped = new HashSet<>(Arrays.asList("stopped-1", "stopped-2")); + Set executorNodes = Set.of("executor-1", "executor-2"); + Set assigned = Set.of("assigned-1", "assigned-2"); + Set waitingForAssignment = Set.of("waiting-1", "waiting-2"); + Set stopped = Set.of("stopped-1", "stopped-2"); + TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped); assertThat(assignments.getExecutorNodes(), is(equalTo(executorNodes))); @@ -30,4 +29,45 @@ public void testConstructorAndGetters() { assertThat(assignments.getWaitingForAssignment(), is(equalTo(waitingForAssignment))); assertThat(assignments.getStopped(), is(equalTo(stopped))); } + + public void testToString() { + Set executorNodes = Set.of("executor-1"); + Set assigned = Set.of("assigned-1"); + Set waitingForAssignment = Set.of("waiting-1"); + Set stopped = Set.of("stopped-1"); + + TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped); + + assertThat( + assignments.toString(), + is( + equalTo( + "TransformNodeAssignments[" + + "executorNodes=[executor-1]," + + "assigned=[assigned-1]," + + "waitingForAssignment=[waiting-1]," + + "stopped=[stopped-1]" + + "]" + ) + ) + ); + } + + public void testToString_EmptyCollections() { + Set executorNodes = Set.of(); + Set assigned = Set.of(); + Set 
waitingForAssignment = Set.of(); + Set stopped = Set.of(); + + TransformNodeAssignments assignments = new TransformNodeAssignments(executorNodes, assigned, waitingForAssignment, stopped); + + assertThat( + assignments.toString(), + is( + equalTo( + "TransformNodeAssignments[" + "executorNodes=[]," + "assigned=[]," + "waitingForAssignment=[]," + "stopped=[]" + "]" + ) + ) + ); + } } From c4a11de0046aa8b4b13bd1081e6120ff574d02ee Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Wed, 10 Apr 2024 11:28:01 +0200 Subject: [PATCH 122/173] Make API key actions local-only (#107148) Refactoring PR to make create, grant, and update API key actions local-only. Also ports a profiles action since it relies on the same base class as grant API key. --- .../core/security/action/GrantRequest.java | 16 +- .../apikey/AbstractCreateApiKeyRequest.java | 16 +- .../apikey/BaseBulkUpdateApiKeyRequest.java | 14 -- .../apikey/BaseSingleUpdateApiKeyRequest.java | 14 -- .../apikey/BaseUpdateApiKeyRequest.java | 23 +-- .../apikey/BulkUpdateApiKeyRequest.java | 6 - .../action/apikey/CreateApiKeyRequest.java | 51 ------- .../CreateCrossClusterApiKeyRequest.java | 28 ---- .../action/apikey/GrantApiKeyRequest.java | 14 -- .../action/apikey/UpdateApiKeyRequest.java | 6 - .../UpdateCrossClusterApiKeyRequest.java | 6 - .../profile/ActivateProfileRequest.java | 13 -- ...UpdateApiKeyRequestSerializationTests.java | 71 --------- .../apikey/CreateApiKeyRequestTests.java | 61 -------- .../CreateCrossClusterApiKeyRequestTests.java | 137 ------------------ ...UpdateApiKeyRequestSerializationTests.java | 72 --------- .../UpdateCrossClusterApiKeyRequestTests.java | 34 ----- .../xpack/security/apikey/ApiKeyRestIT.java | 2 +- .../security/action/TransportGrantAction.java | 9 +- .../TransportBaseUpdateApiKeyAction.java | 9 +- .../TransportBulkUpdateApiKeyAction.java | 2 +- .../apikey/TransportCreateApiKeyAction.java | 7 +- ...ansportCreateCrossClusterApiKeyAction.java | 15 +- .../apikey/TransportGrantApiKeyAction.java | 10 +- .../apikey/TransportUpdateApiKeyAction.java | 2 +- ...ansportUpdateCrossClusterApiKeyAction.java | 2 +- .../TransportActivateProfileAction.java | 1 - ...rtCreateCrossClusterApiKeyActionTests.java | 2 +- ...rtUpdateCrossClusterApiKeyActionTests.java | 2 +- .../security/authc/ApiKeyServiceTests.java | 28 +++- ...stUpdateCrossClusterApiKeyActionTests.java | 2 +- 31 files changed, 62 insertions(+), 613 deletions(-) delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java index 466ac58e55bf7..9675d66a183a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantRequest.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -21,23 +21,17 @@ public GrantRequest() { this.grant = new Grant(); } - public GrantRequest(StreamInput in) throws IOException { - super(in); - this.grant = new Grant(in); - } - public Grant getGrant() { return grant; } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - grant.writeTo(out); + public ActionRequestValidationException validate() { + return grant.validate(null); } @Override - public ActionRequestValidationException validate() { - return grant.validate(null); + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java index 998d35267be37..6e827a4a66a5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/AbstractCreateApiKeyRequest.java @@ -9,10 +9,11 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.support.MetadataUtils; @@ -39,14 +40,6 @@ public AbstractCreateApiKeyRequest() { this.id = UUIDs.base64UUID(); // because auditing can currently only catch requests but not responses, } - @SuppressWarnings("this-escape") - public AbstractCreateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.id = doReadId(in); - } - - protected abstract String doReadId(StreamInput in) throws IOException; - public String getId() { return id; } @@ -102,4 +95,9 @@ public ActionRequestValidationException validate() { assert refreshPolicy != null : "refresh policy is required"; return validationException; } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java index 34b249d7a8233..0ea772920652b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseBulkUpdateApiKeyRequest.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -35,11 +32,6 @@ public BaseBulkUpdateApiKeyRequest( this.ids = Objects.requireNonNull(ids, "API key IDs must not be null"); } - public BaseBulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.ids = in.readStringCollectionAsList(); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); @@ -49,12 +41,6 @@ public ActionRequestValidationException validate() { return validationException; } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringCollection(ids); - } - public List getIds() { return ids; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java index 725a9fb197b07..a3958b31e4716 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseSingleUpdateApiKeyRequest.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; @@ -32,17 +29,6 @@ public BaseSingleUpdateApiKeyRequest( this.id = Objects.requireNonNull(id, "API key ID must not be null"); } - public BaseSingleUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.id = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - } - public String getId() { return id; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java index e5e3e3f2cabac..a592550484eb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BaseUpdateApiKeyRequest.java @@ -7,10 +7,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -43,17 +42,6 @@ public BaseUpdateApiKeyRequest( this.expiration = expiration; } - public BaseUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.roleDescriptors = in.readOptionalCollectionAsList(RoleDescriptor::new); - this.metadata = in.readGenericMap(); - if 
(in.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - expiration = in.readOptionalTimeValue(); - } else { - expiration = null; - } - } - public Map getMetadata() { return metadata; } @@ -90,12 +78,7 @@ public ActionRequestValidationException validate() { } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalCollection(roleDescriptors); - out.writeGenericMap(metadata); - if (out.getTransportVersion().onOrAfter(TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED)) { - out.writeOptionalTimeValue(expiration); - } + public final void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java index 534c874438e3f..eab74d6250aca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -41,10 +39,6 @@ public BulkUpdateApiKeyRequest( super(ids, roleDescriptors, metadata, expiration); } - public BulkUpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java index 32669d5dca447..1d5eb35b99ea7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequest.java @@ -7,18 +7,12 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -55,32 +49,6 @@ public CreateApiKeyRequest( this.metadata = metadata; } - public CreateApiKeyRequest(StreamInput in) throws IOException { - super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - this.name = in.readOptionalString(); - } else { - this.name = in.readString(); - } - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - 
this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { - this.metadata = in.readGenericMap(); - } else { - this.metadata = null; - } - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - return in.readString(); - } else { - return UUIDs.base64UUID(); - } - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; @@ -114,23 +82,4 @@ public ActionRequestValidationException validate() { } return validationException; } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeString(id); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeOptionalString(name); - } else { - out.writeString(name); - } - out.writeOptionalTimeValue(expiration); - out.writeCollection(getRoleDescriptors()); - refreshPolicy.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeGenericMap(metadata); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java index a375808def6d7..eea96bcbfcdaf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequest.java @@ -8,9 +8,6 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -37,20 +34,6 @@ public CreateCrossClusterApiKeyRequest( this.metadata = metadata; } - public CreateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.name = in.readString(); - this.expiration = in.readOptionalTimeValue(); - this.roleDescriptors = in.readCollectionAsImmutableList(RoleDescriptor::new); - this.refreshPolicy = WriteRequest.RefreshPolicy.readFrom(in); - this.metadata = in.readGenericMap(); - } - - @Override - protected String doReadId(StreamInput in) throws IOException { - return in.readString(); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; @@ -67,17 +50,6 @@ public ActionRequestValidationException validate() { return super.validate(); } - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeString(name); - out.writeOptionalTimeValue(expiration); - out.writeCollection(roleDescriptors); - refreshPolicy.writeTo(out); - out.writeGenericMap(metadata); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java index 16a95e349cda8..17d5424b630eb 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GrantApiKeyRequest.java @@ -9,11 +9,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; import java.util.Objects; /** @@ -30,17 +27,6 @@ public GrantApiKeyRequest() { this.apiKey = new CreateApiKeyRequest(); } - public GrantApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.apiKey = new CreateApiKeyRequest(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - apiKey.writeTo(out); - } - public WriteRequest.RefreshPolicy getRefreshPolicy() { return apiKey.getRefreshPolicy(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java index 9b1e9194d59fd..ffbc5a836633c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequest.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -30,10 +28,6 @@ public UpdateApiKeyRequest( super(roleDescriptors, metadata, expiration, id); } - public UpdateApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.REST; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java index 184ce2c521ce0..04102e571e193 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequest.java @@ -8,11 +8,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -29,10 +27,6 @@ public UpdateCrossClusterApiKeyRequest( super(roleDescriptorBuilder == null ? 
null : List.of(roleDescriptorBuilder.build()), metadata, expiration, id); } - public UpdateCrossClusterApiKeyRequest(StreamInput in) throws IOException { - super(in); - } - @Override public ApiKey.Type getType() { return ApiKey.Type.CROSS_CLUSTER; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java index f572c57232b2e..72005bf319c49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileRequest.java @@ -8,27 +8,14 @@ package org.elasticsearch.xpack.core.security.action.profile; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.action.GrantRequest; -import java.io.IOException; - public class ActivateProfileRequest extends GrantRequest { public ActivateProfileRequest() { super(); } - public ActivateProfileRequest(StreamInput in) throws IOException { - super(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } - @Override public ActionRequestValidationException validate() { return super.validate(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 0221554963892..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
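Context for the test-class deletions that follow: BulkUpdateApiKeyRequestSerializationTests (starting here) and UpdateApiKeyRequestSerializationTests further down existed only to round-trip the request over a stream via AbstractWireSerializingTestCase, which is meaningless once writeTo delegates unconditionally to TransportAction.localOnly(). If coverage of the new behaviour were wanted, a test would instead assert the fail-fast; a sketch (this assumes an ESTestCase context, and the exact exception type is an assumption, not taken from this patch):

    public void testWriteToRejectsSerialization() {
        var request = new UpdateApiKeyRequest(randomAlphaOfLength(10), null, null, null);
        // writeTo never writes anything; localOnly() is expected to throw
        expectThrows(UnsupportedOperationException.class, () -> request.writeTo(new BytesStreamOutput()));
    }
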
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class BulkUpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - BulkUpdateApiKeyRequest testInstance = createTestInstance(); - BulkUpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected BulkUpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var ids = randomList(randomInt(5), () -> randomAlphaOfLength(10)); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new BulkUpdateApiKeyRequest(ids, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return BulkUpdateApiKeyRequest::new; - } - - @Override - protected BulkUpdateApiKeyRequest mutateInstance(BulkUpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new BulkUpdateApiKeyRequest( - instance.getIds(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 63dd636a31c3f..eee2e6e7da338 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -7,17 +7,10 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -129,58 
+122,4 @@ public void testRoleDescriptorValidation() { assertThat(ve1.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows[i] + "]")); } } - - public void testSerialization() throws IOException { - final String name = randomAlphaOfLengthBetween(1, 256); - final TimeValue expiration = randomBoolean() - ? null - : TimeValue.parseTimeValue(randomTimeValue(), "test serialization of create api key"); - final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); - boolean nullOrEmptyRoleDescriptors = randomBoolean(); - final List descriptorList; - if (nullOrEmptyRoleDescriptors) { - descriptorList = randomBoolean() ? null : List.of(); - } else { - final int numDescriptors = randomIntBetween(1, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final CreateApiKeyRequest request = new CreateApiKeyRequest(); - request.setName(name); - request.setExpiration(expiration); - - if (refreshPolicy != request.getRefreshPolicy() || randomBoolean()) { - request.setRefreshPolicy(refreshPolicy); - } - request.setRoleDescriptors(descriptorList); - - boolean testV710Bwc = randomBoolean(); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - if (testV710Bwc) { - out.setTransportVersion(TransportVersions.V_7_9_0); // a version before 7.10 - } - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - if (testV710Bwc) { - in.setTransportVersion(TransportVersions.V_7_9_0); - } - final CreateApiKeyRequest serialized = new CreateApiKeyRequest(in); - assertEquals(name, serialized.getName()); - if (false == testV710Bwc) { - assertEquals(request.getId(), serialized.getId()); // API key id is only preserved after v 7.10 - } - assertEquals(expiration, serialized.getExpiration()); - assertEquals(refreshPolicy, serialized.getRefreshPolicy()); - if (nullOrEmptyRoleDescriptors) { - assertThat(serialized.getRoleDescriptors().isEmpty(), is(true)); - } else { - assertEquals(descriptorList, serialized.getRoleDescriptors()); - } - } - } - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java deleted file mode 100644 index a0a9c9b31b430..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateCrossClusterApiKeyRequestTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
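Note that deleting CreateCrossClusterApiKeyRequestTests (starting here) does not lose its shared fixture: later hunks in this same patch move the ACCESS_CANDIDATES list into ApiKeyServiceTests along with the accessor, unchanged:

    public static String randomCrossClusterApiKeyAccessField() {
        return randomFrom(ACCESS_CANDIDATES);
    }

The four call sites (ApiKeyRestIT, TransportCreateCrossClusterApiKeyActionTests, TransportUpdateCrossClusterApiKeyActionTests and RestUpdateCrossClusterApiKeyActionTests) only swap their static import accordingly.
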
- */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; - -public class CreateCrossClusterApiKeyRequestTests extends AbstractWireSerializingTestCase { - - private String access; - private CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - - @Before - public void init() throws IOException { - access = randomCrossClusterApiKeyAccessField(); - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(access); - } - - @Override - protected Writeable.Reader instanceReader() { - return CreateCrossClusterApiKeyRequest::new; - } - - @Override - protected CreateCrossClusterApiKeyRequest createTestInstance() { - CreateCrossClusterApiKeyRequest request = new CreateCrossClusterApiKeyRequest( - randomAlphaOfLengthBetween(3, 8), - roleDescriptorBuilder, - randomExpiration(), - randomMetadata() - ); - request.setRefreshPolicy(randomFrom(IMMEDIATE, WAIT_UNTIL, NONE)); - return request; - } - - @Override - protected CreateCrossClusterApiKeyRequest mutateInstance(CreateCrossClusterApiKeyRequest instance) throws IOException { - switch (randomIntBetween(1, 4)) { - case 1 -> { - return new CreateCrossClusterApiKeyRequest( - randomValueOtherThan(instance.getName(), () -> randomAlphaOfLengthBetween(3, 8)), - roleDescriptorBuilder, - instance.getExpiration(), - instance.getMetadata() - ); - } - case 2 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - CrossClusterApiKeyRoleDescriptorBuilder.parse( - randomValueOtherThan(access, CreateCrossClusterApiKeyRequestTests::randomCrossClusterApiKeyAccessField) - ), - instance.getExpiration(), - instance.getMetadata() - ); - } - case 3 -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - randomValueOtherThan(instance.getExpiration(), CreateCrossClusterApiKeyRequestTests::randomExpiration), - instance.getMetadata() - ); - } - default -> { - return new CreateCrossClusterApiKeyRequest( - instance.getName(), - roleDescriptorBuilder, - instance.getExpiration(), - randomValueOtherThan(instance.getMetadata(), CreateCrossClusterApiKeyRequestTests::randomMetadata) - ); - } - } - } - - private static TimeValue randomExpiration() { - return randomFrom(TimeValue.timeValueHours(randomIntBetween(1, 999)), null); - } - - private static Map randomMetadata() { - return randomFrom( - randomMap( - 0, - 3, - () -> new Tuple<>( - randomAlphaOfLengthBetween(3, 8), - randomFrom(randomAlphaOfLengthBetween(3, 8), randomInt(), randomBoolean()) - ) - ), - null - ); - } - - private static final List ACCESS_CANDIDATES = List.of(""" - { - "search": [ {"names": ["logs"]} ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc" } ] - }""", """ - { - "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] - }""", """ - { - "replication": [ {"names": ["archive"], 
"allow_restricted_indices": true } ] - }""", """ - { - "replication": [ {"names": ["archive"]} ] - }""", """ - { - "search": [ {"names": ["logs"]} ], - "replication": [ {"names": ["archive"]} ] - }"""); - - public static String randomCrossClusterApiKeyAccessField() { - return randomFrom(ACCESS_CANDIDATES); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java deleted file mode 100644 index 83d74b7e9d413..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestSerializationTests.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.security.action.apikey; - -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.nullValue; - -public class UpdateApiKeyRequestSerializationTests extends AbstractWireSerializingTestCase { - public void testSerializationBackwardsCompatibility() throws IOException { - UpdateApiKeyRequest testInstance = createTestInstance(); - UpdateApiKeyRequest deserializedInstance = copyInstance(testInstance, TransportVersions.V_8_11_X); - try { - // Transport is on a version before expiration was introduced, so should always be null - assertThat(deserializedInstance.getExpiration(), nullValue()); - } finally { - dispose(deserializedInstance); - } - } - - @Override - protected UpdateApiKeyRequest createTestInstance() { - final boolean roleDescriptorsPresent = randomBoolean(); - final List descriptorList; - if (roleDescriptorsPresent == false) { - descriptorList = null; - } else { - final int numDescriptors = randomIntBetween(0, 4); - descriptorList = new ArrayList<>(); - for (int i = 0; i < numDescriptors; i++) { - descriptorList.add(new RoleDescriptor("role_" + i, new String[] { "all" }, null, null)); - } - } - - final var id = randomAlphaOfLength(10); - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - return new UpdateApiKeyRequest(id, descriptorList, metadata, expiration); - } - - @Override - protected Writeable.Reader instanceReader() { - return UpdateApiKeyRequest::new; - } - - @Override - protected UpdateApiKeyRequest mutateInstance(UpdateApiKeyRequest instance) throws IOException { - Map metadata = ApiKeyTests.randomMetadata(); - long days = randomValueOtherThan(instance.getExpiration().days(), () -> ApiKeyTests.randomFutureExpirationTime().getDays()); - return new UpdateApiKeyRequest( - instance.getId(), - instance.getRoleDescriptors(), - metadata, - TimeValue.parseTimeValue(days + "d", null, "expiration") - ); - } - -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java index f9faa2731dcc0..f7a0d1a6d35bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateCrossClusterApiKeyRequestTests.java @@ -8,49 +8,15 @@ package org.elasticsearch.xpack.core.security.action.apikey; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Map; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class UpdateCrossClusterApiKeyRequestTests extends ESTestCase { - public void testSerialization() throws IOException { - final var metadata = ApiKeyTests.randomMetadata(); - final TimeValue expiration = ApiKeyTests.randomFutureExpirationTime(); - final CrossClusterApiKeyRoleDescriptorBuilder roleDescriptorBuilder; - if (randomBoolean()) { - roleDescriptorBuilder = CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()); - } else { - roleDescriptorBuilder = null; - } - - final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), roleDescriptorBuilder, metadata, expiration); - assertThat(request.getType(), is(ApiKey.Type.CROSS_CLUSTER)); - assertThat(request.validate(), nullValue()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - request.writeTo(out); - try (StreamInput in = out.bytes().streamInput()) { - final var serialized = new UpdateCrossClusterApiKeyRequest(in); - assertEquals(request.getId(), serialized.getId()); - assertEquals(request.getRoleDescriptors(), serialized.getRoleDescriptors()); - assertEquals(metadata, serialized.getMetadata()); - assertEquals(expiration, serialized.getExpiration()); - assertEquals(request.getType(), serialized.getType()); - } - } - } - public void testNotEmptyUpdateValidation() { final var request = new UpdateCrossClusterApiKeyRequest(randomAlphaOfLength(10), null, null, null); final ActionRequestValidationException ve = request.validate(); diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 08bca3ffdaeea..9c22a6bb4d210 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -45,8 +45,8 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE; import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE_DESCRIPTOR; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static 
org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField.RUN_AS_USER_HEADER; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java index 881d1340ebc3f..667b513555594 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantAction.java @@ -11,9 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; @@ -27,7 +25,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; -public abstract class TransportGrantAction extends HandledTransportAction< +public abstract class TransportGrantAction extends TransportAction< Request, Response> { @@ -39,12 +37,11 @@ public TransportGrantAction( String actionName, TransportService transportService, ActionFilters actionFilters, - Writeable.Reader requestReader, AuthenticationService authenticationService, AuthorizationService authorizationService, ThreadContext threadContext ) { - super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, actionFilters, transportService.getTaskManager()); this.authenticationService = authenticationService; this.authorizationService = authorizationService; this.threadContext = threadContext; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java index 9d367bf5caf24..33b1e44004454 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBaseUpdateApiKeyAction.java @@ -10,9 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -24,7 +22,7 @@ import java.util.Map; public abstract class TransportBaseUpdateApiKeyAction extends - HandledTransportAction { + 
TransportAction { private final SecurityContext securityContext; @@ -32,10 +30,9 @@ protected TransportBaseUpdateApiKeyAction( final String actionName, final TransportService transportService, final ActionFilters actionFilters, - final Writeable.Reader requestReader, final SecurityContext context ) { - super(actionName, transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(actionName, actionFilters, transportService.getTaskManager()); this.securityContext = context; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java index cb8f6c861ecf7..3b978c3e44b4c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportBulkUpdateApiKeyAction.java @@ -38,7 +38,7 @@ public TransportBulkUpdateApiKeyAction( final CompositeRolesStore rolesStore, final NamedXContentRegistry xContentRegistry ) { - super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, BulkUpdateApiKeyRequest::new, context); + super(BulkUpdateApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java index 568e0fe5eb075..268afc7f0b32f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateApiKeyAction.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -28,7 +27,7 @@ /** * Implementation of the action needed to create an API key */ -public final class TransportCreateApiKeyAction extends HandledTransportAction { +public final class TransportCreateApiKeyAction extends TransportAction { private final ApiKeyService apiKeyService; private final ApiKeyUserRoleDescriptorResolver resolver; @@ -43,7 +42,7 @@ public TransportCreateApiKeyAction( CompositeRolesStore rolesStore, NamedXContentRegistry xContentRegistry ) { - super(CreateApiKeyAction.NAME, transportService, actionFilters, CreateApiKeyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(CreateApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); this.securityContext = context; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java index 267a3aafe0c72..eeccd4b833a23 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyAction.java @@ -9,9 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -26,9 +25,7 @@ /** * Implementation of the action needed to create an API key */ -public final class TransportCreateCrossClusterApiKeyAction extends HandledTransportAction< - CreateCrossClusterApiKeyRequest, - CreateApiKeyResponse> { +public final class TransportCreateCrossClusterApiKeyAction extends TransportAction { private final ApiKeyService apiKeyService; private final SecurityContext securityContext; @@ -40,13 +37,7 @@ public TransportCreateCrossClusterApiKeyAction( ApiKeyService apiKeyService, SecurityContext context ) { - super( - CreateCrossClusterApiKeyAction.NAME, - transportService, - actionFilters, - CreateCrossClusterApiKeyRequest::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(CreateCrossClusterApiKeyAction.NAME, actionFilters, transportService.getTaskManager()); this.apiKeyService = apiKeyService; this.securityContext = context; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java index a6401053634b2..54e073906b815 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportGrantApiKeyAction.java @@ -65,15 +65,7 @@ public TransportGrantApiKeyAction( ApiKeyService apiKeyService, ApiKeyUserRoleDescriptorResolver resolver ) { - super( - GrantApiKeyAction.NAME, - transportService, - actionFilters, - GrantApiKeyRequest::new, - authenticationService, - authorizationService, - threadContext - ); + super(GrantApiKeyAction.NAME, transportService, actionFilters, authenticationService, authorizationService, threadContext); this.apiKeyService = apiKeyService; this.resolver = resolver; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java index 2427b571cf575..b6e0854d6c443 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateApiKeyAction( final CompositeRolesStore rolesStore, final NamedXContentRegistry xContentRegistry ) { - super(UpdateApiKeyAction.NAME, 
transportService, actionFilters, UpdateApiKeyRequest::new, context); + super(UpdateApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; this.resolver = new ApiKeyUserRoleDescriptorResolver(rolesStore, xContentRegistry); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java index a47bbb0301ebc..f4578bf7a737c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyAction.java @@ -37,7 +37,7 @@ public TransportUpdateCrossClusterApiKeyAction( final ApiKeyService apiKeyService, final SecurityContext context ) { - super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, UpdateCrossClusterApiKeyRequest::new, context); + super(UpdateCrossClusterApiKeyAction.NAME, transportService, actionFilters, context); this.apiKeyService = apiKeyService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java index d7241011d9c09..4d76205d29021 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportActivateProfileAction.java @@ -39,7 +39,6 @@ public TransportActivateProfileAction( ActivateProfileAction.NAME, transportService, actionFilters, - ActivateProfileRequest::new, authenticationService, authorizationService, threadPool.getThreadContext() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java index f94acab50b6b5..9c1419f67bcf0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportCreateCrossClusterApiKeyActionTests.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Set; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java index 70190b70f3f1a..1525b9157a610 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java 
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportUpdateCrossClusterApiKeyActionTests.java @@ -33,7 +33,7 @@ import java.util.Set; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index d2ca214723416..daa2b9cf149de 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -158,7 +158,6 @@ import static org.elasticsearch.test.SecurityIntegTestCase.getFastStoredHashAlgoForTests; import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_ID_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_METADATA_KEY; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.API_KEY_TYPE_KEY; @@ -200,6 +199,29 @@ public class ApiKeyServiceTests extends ESTestCase { + private static final List ACCESS_CANDIDATES = List.of(""" + { + "search": [ {"names": ["logs"]} ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc" } ] + }""", """ + { + "search": [ {"names": ["logs"], "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "search": [ {"names": ["logs"], "query": "abc", "field_security": {"grant": ["*"], "except": ["private"]} } ] + }""", """ + { + "replication": [ {"names": ["archive"], "allow_restricted_indices": true } ] + }""", """ + { + "replication": [ {"names": ["archive"]} ] + }""", """ + { + "search": [ {"names": ["logs"]} ], + "replication": [ {"names": ["archive"]} ] + }"""); private ThreadPool threadPool; private Client client; private SecurityIndexManager securityIndex; @@ -2845,6 +2867,10 @@ private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { ); } + public static String randomCrossClusterApiKeyAccessField() { + return randomFrom(ACCESS_CANDIDATES); + } + public static class Utils { private static final AuthenticationContextSerializer authenticationContextSerializer = new AuthenticationContextSerializer(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java index f9fa9269c4ef1..ddeffc0675498 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java @@ -30,7 +30,7 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyRequestTests.randomCrossClusterApiKeyAccessField; +import static org.elasticsearch.xpack.security.authc.ApiKeyServiceTests.randomCrossClusterApiKeyAccessField; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; From 62729c9480a9336b6937d0e9581fde6430a5e745 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 10 Apr 2024 11:52:02 +0200 Subject: [PATCH 123/173] Port DocsTest gradle plugin to java (#107124) * Refactor DocsTest plugin to java * Rework asciidoc parsing to make adding more parser simple --- .../doc/DocsTestPluginFuncTest.groovy | 132 +++ .../gradle/internal/doc/DocsTestPlugin.groovy | 99 --- .../doc/RestTestsFromSnippetsTask.groovy | 503 ----------- .../gradle/internal/doc/SnippetsTask.groovy | 438 --------- .../internal/doc/AsciidocSnippetParser.java | 306 +++++++ .../gradle/internal/doc/DocSnippetTask.java | 88 ++ .../gradle/internal/doc/DocsTestPlugin.java | 106 +++ .../gradle/internal/doc/ParsingUtils.java | 57 ++ .../doc/RestTestsFromDocSnippetTask.java | 526 +++++++++++ .../gradle/internal/doc/Snippet.java | 188 ++++ .../gradle/internal/doc/SnippetParser.java | 17 + .../gradle/internal/doc/Source.java | 21 + .../internal/doc/AsciidocParserSpec.groovy | 184 ++++ .../internal/doc/DocSnippetTaskSpec.groovy | 676 ++++++++++++++ .../RestTestsFromDocSnippetTaskSpec.groovy | 839 ++++++++++++++++++ .../doc/RestTestFromSnippetsTaskTests.java | 57 -- .../internal/doc/SnippetsTaskTests.java | 63 -- 17 files changed, 3140 insertions(+), 1160 deletions(-) create mode 100644 build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy delete mode 100644 build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java create mode 100644 build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy create mode 100644 build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy create mode 100644 
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy delete mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java delete mode 100644 build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy new file mode 100644 index 0000000000000..4c542d371c32c --- /dev/null +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import org.elasticsearch.gradle.fixtures.AbstractGradleInternalPluginFuncTest +import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin +import org.gradle.testkit.runner.TaskOutcome + +class DocsTestPluginFuncTest extends AbstractGradleInternalPluginFuncTest { + Class pluginClassUnderTest = DocsTestPlugin.class + + def setup() { + File docDir = new File(projectDir, 'doc'); + docDir.mkdirs() + addSampleDoc(docDir) + buildApiRestrictionsDisabled = true + configurationCacheCompatible = false; + buildFile << """ +tasks.named('listSnippets') { + docs = fileTree('doc') +} + +tasks.named('listConsoleCandidates') { + docs = fileTree('doc') +} +""" + } + + def "can list snippets"() { + when: + def result = gradleRunner("listSnippets").build() + then: + result.task(":listSnippets").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listSnippets +mapper-annotated-text.asciidoc[37:39](Painless) +mapper-annotated-text.asciidoc[42:44](js) +mapper-annotated-text.asciidoc[51:69](console)// TEST[setup:seats] +""") + } + + def "can console candidates"() { + when: + def result = gradleRunner("listConsoleCandidates").build() + then: + result.task(":listConsoleCandidates").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listConsoleCandidates +mapper-annotated-text.asciidoc[42:44](js) +""") + } + + void addSampleDoc(File docFolder) { + new File(docFolder, "mapper-annotated-text.asciidoc").text = """ +[[painless-filter-context]] +=== Filter context + +Use a Painless script as a {ref}/query-dsl-script-query.html[filter] in a +query to include and exclude documents. + + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +*Return* + +`boolean`:: + Return `true` if the current document should be returned as a result of + the query, and `false` otherwise. + + +*API* + +The standard <> is available. + +*Example* + +To run this example, first follow the steps in +<>. + +This script finds all unsold documents that cost less than \$25. 
+ +[source,Painless] +---- +doc['sold'].value == false && doc['cost'].value < 25 +---- + +[source,js] +---- +curl 'hello world' +---- + +Defining `cost` as a script parameter enables the cost to be configured +in the script query request. For example, the following request finds +all available theatre seats for evening performances that are under \$25. + +[source,console] +---- +GET seats/_search +{ + "query": { + "bool": { + "filter": { + "script": { + "script": { + "source": "doc['sold'].value == false && doc['cost'].value < params.cost", + "params": { + "cost": 25 + } + } + } + } + } + } +} +---- +// TEST[setup:seats] +""" + } +} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy deleted file mode 100644 index 38b4cb499eeb9..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc - -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask -import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask -import org.gradle.api.Action -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.Directory -import org.gradle.api.file.ProjectLayout -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.provider.Provider -import org.gradle.api.tasks.TaskProvider - -import javax.inject.Inject - -/** - * Sets up tests for documentation. - */ -class DocsTestPlugin implements Plugin { - - private FileOperations fileOperations - private ProjectLayout projectLayout - - @Inject - DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { - this.projectLayout = projectLayout - this.fileOperations = fileOperations - } - - @Override - void apply(Project project) { - project.pluginManager.apply('elasticsearch.legacy-yaml-rest-test') - - String distribution = System.getProperty('tests.distribution', 'default') - // The distribution can be configured with -Dtests.distribution on the command line - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { testDistribution = distribution.toUpperCase() } - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { nameCustomization = { it.replace("yamlRestTest", "node") } } - // Docs are published separately so no need to assemble - project.tasks.named("assemble").configure {enabled = false } - Map commonDefaultSubstitutions = [ - /* These match up with the asciidoc syntax for substitutions but - * the values may differ. In particular {version} needs to resolve - * to the version being built for testing but needs to resolve to - * the last released version for docs. 
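To make the substitution table that follows concrete (the version numbers here are illustrative, assuming an 8.14.0-SNAPSHOT build):

    // {version}           -> "8.14.0"           Version.toString() drops the -SNAPSHOT qualifier
    // {version_qualified} -> "8.14.0-SNAPSHOT"  the raw VersionProperties.elasticsearch value
    // {build_type}        -> "tar" on Unix, "zip" on Windows
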
*/ - '\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(), - '\\{version_qualified\\}': VersionProperties.elasticsearch, - '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), - '\\{build_flavor\\}' : distribution, - '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(), - ] - project.tasks.register('listSnippets', SnippetsTask) { - group 'Docs' - description 'List each snippet' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - println(snippet.toString()) - } - } - } - project.tasks.register('listConsoleCandidates', SnippetsTask) { - group 'Docs' - description - 'List snippets that probably should be marked // CONSOLE' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) { - println(it.toString()) - } - } - } - } - - Provider restRootDir = projectLayout.buildDirectory.dir("rest") - TaskProvider buildRestTests = project.tasks.register('buildRestTests', RestTestsFromSnippetsTask) { - defaultSubstitutions = commonDefaultSubstitutions - testRoot.convention(restRootDir) - doFirst { - getFileOperations().delete(testRoot.get()) - } - } - - // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm - project.sourceSets.yamlRestTest.output.dir(restRootDir, builtBy: buildRestTests) - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy deleted file mode 100644 index 81207181dc9a7..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy +++ /dev/null @@ -1,503 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import groovy.transform.PackageScope -import org.elasticsearch.gradle.internal.doc.SnippetsTask.Snippet -import org.gradle.api.Action -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.DirectoryProperty -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.OutputDirectory -import org.gradle.api.model.ObjectFactory - -import javax.inject.Inject; -import java.nio.file.Files -import java.nio.file.Path - -/** - * Generates REST tests for each snippet marked // TEST. - */ -abstract class RestTestsFromSnippetsTask extends SnippetsTask { - /** - * These languages aren't supported by the syntax highlighter so we - * shouldn't use them. - */ - private static final List BAD_LANGUAGES = ['json', 'javascript'] - - /** - * Test setups defined in the build instead of the docs so they can be - * shared between many doc files. - */ - @Input - Map setups = new HashMap() - - /** - * Test teardowns defined in the build instead of the docs so they can be - * shared between many doc files. 
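For orientation, this is how a named setup flows end to end (seats is the name used by the sample doc earlier in this patch): a snippet tagged // TEST[setup:seats] makes the generated test splice in setups['seats'], and an unknown name fails the build with InvalidUserDataException. A sketch of the emitted YAML prefix:

    # Named setup seats
    <the body registered under setups['seats']>
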
- */ - @Input - Map teardowns = new HashMap() - - /** - * A list of files that contain snippets that *probably* should be - * converted to `// CONSOLE` but have yet to be converted. If a file is in - * this list and doesn't contain unconverted snippets this task will fail. - * If there are unconverted snippets not in this list then this task will - * fail. All files are paths relative to the docs dir. - */ - @Input - List expectedUnconvertedCandidates = [] - - /** - * Root directory of the tests being generated. To make rest tests happy - * we generate them in a testRoot which is contained in this directory. - */ - private DirectoryProperty testRoot - - @Internal - Set names = new HashSet<>() - - @Inject - abstract FileOperations getFileOperations(); - - @Inject - RestTestsFromSnippetsTask(ObjectFactory objectFactory) { - testRoot = objectFactory.directoryProperty() - TestBuilder builder = new TestBuilder() - perSnippet = new Action() { - @Override - void execute(Snippet snippet) { - builder.handleSnippet(snippet) - } - } - doLast { - builder.checkUnconverted() - builder.finishLastTest() - } - } - - /** - * Root directory containing all the files generated by this task. It is - * contained within testRoot. - */ - File outputRoot() { - return new File(testRoot.get().asFile, '/rest-api-spec/test') - } - - @OutputDirectory - DirectoryProperty getTestRoot() { - return testRoot - } -/** - * Is this snippet a candidate for conversion to `// CONSOLE`? - */ - static isConsoleCandidate(Snippet snippet) { - /* Snippets that are responses or already marked as `// CONSOLE` or - * `// NOTCONSOLE` are not candidates. */ - if (snippet.console != null || snippet.testResponse) { - return false - } - /* js snippets almost always should be marked with `// CONSOLE`. js - * snippets that shouldn't be marked `// CONSOLE`, like examples for - * js client, should always be marked with `// NOTCONSOLE`. - * - * `sh` snippets that contain `curl` almost always should be marked - * with `// CONSOLE`. In the exceptionally rare cases where they are - * not communicating with Elasticsearch, like the examples in the ec2 - * and gce discovery plugins, the snippets should be marked - * `// NOTCONSOLE`. */ - return snippet.language == 'js' || snippet.curl - } - - /** - * Certain requests should not have the shard failure check because the - * format of the response is incompatible i.e. it is not a JSON object. - */ - static shouldAddShardFailureCheck(String path) { - return path.startsWith('_cat') == false && path.startsWith('_ml/datafeeds/') == false - } - - /** - * Converts Kibana's block quoted strings into standard JSON. These - * {@code """} delimited strings can be embedded in CONSOLE and can - * contain newlines and {@code "} without the normal JSON escaping. - * This has to add it. - */ - @PackageScope - static String replaceBlockQuote(String body) { - int start = body.indexOf('"""'); - if (start < 0) { - return body - } - /* - * 1.3 is a fairly wild guess of the extra space needed to hold - * the escaped string. 
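A worked example of what replaceBlockQuote produces (the snippet body is hypothetical):

    input:  {"script": {"source": """doc["sold"].value == false
            && doc.cost.value < 25"""}}
    output: {"script": {"source": "doc[\"sold\"].value == false\n&& doc.cost.value < 25"}}

Each """ pair collapses to a normal JSON string: embedded quotes become \" and real newlines become the two characters \n.
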
- */ - StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); - int startOfNormal = 0; - while (start >= 0) { - int end = body.indexOf('"""', start + 3); - if (end < 0) { - throw new InvalidUserDataException( - "Invalid block quote starting at $start in:\n$body") - } - result.append(body.substring(startOfNormal, start)); - result.append('"'); - result.append(body.substring(start + 3, end) - .replace('"', '\\"') - .replace("\n", "\\n")); - result.append('"'); - startOfNormal = end + 3; - start = body.indexOf('"""', startOfNormal); - } - result.append(body.substring(startOfNormal)); - return result.toString(); - } - - private class TestBuilder { - private static final String SYNTAX = { - String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ - String pathAndQuery = /(?[^\n]+)/ - String badBody = /GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#/ - String body = /(?(?:\n(?!$badBody)[^\n]+)+)/ - String rawRequest = /(?:$method\s+$pathAndQuery$body?)/ - String yamlRequest = /(?:startyaml(?s)(?.+?)(?-s)endyaml)/ - String nonComment = /(?:$rawRequest|$yamlRequest)/ - String comment = /(?#.+)/ - /(?:$comment|$nonComment)\n+/ - }() - - /** - * The file in which we saw the last snippet that made a test. - */ - Path lastDocsPath - - /** - * The file we're building. - */ - PrintWriter current - - /** - * Files containing all snippets that *probably* should be converted - * to `// CONSOLE` but have yet to be converted. All files are paths - * relative to the docs dir. - */ - Set unconvertedCandidates = new HashSet<>() - - /** - * The last non-TESTRESPONSE snippet. - */ - Snippet previousTest - - /** - * Called each time a snippet is encountered. Tracks the snippets and - * calls buildTest to actually build the test. - */ - - void handleSnippet(Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(snippet)) { - unconvertedCandidates.add(snippet.path.toString() - .replace('\\', '/')) - } - if (BAD_LANGUAGES.contains(snippet.language)) { - throw new InvalidUserDataException( - "$snippet: Use `js` instead of `${snippet.language}`.") - } - if (snippet.testSetup) { - testSetup(snippet) - previousTest = snippet - return - } - if (snippet.testTearDown) { - testTearDown(snippet) - previousTest = snippet - return - } - if (snippet.testResponse || snippet.language == 'console-result') { - if (previousTest == null) { - throw new InvalidUserDataException("$snippet: No paired previous test") - } - if (previousTest.path != snippet.path) { - throw new InvalidUserDataException("$snippet: Result can't be first in file") - } - response(snippet) - return - } - if ((snippet.language == 'js') && (snippet.console)) { - throw new InvalidUserDataException( - "$snippet: Use `[source,console]` instead of `// CONSOLE`.") - } - if (snippet.test || snippet.language == 'console') { - test(snippet) - previousTest = snippet - return - } - // Must be an unmarked snippet.... - } - - private void test(Snippet test) { - setupCurrent(test) - - if (test.continued) { - /* Catch some difficult to debug errors with // TEST[continued] - * and throw a helpful error message. 
*/ - if (previousTest == null || previousTest.path != test.path) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot be on first snippet in a file: $test") - } - if (previousTest != null && previousTest.testSetup) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TESTSETUP: $test") - } - if (previousTest != null && previousTest.testTearDown) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TEARDOWN: $test") - } - } else { - current.println('---') - if (test.name != null && test.name.isBlank() == false) { - if(names.add(test.name) == false) { - throw new InvalidUserDataException("Duplicated snippet name '$test.name': $test") - } - current.println("\"$test.name\":") - } else { - current.println("\"line_$test.start\":") - } - /* The Elasticsearch test runner doesn't support quite a few - * constructs unless we output this skip. We don't know if - * we're going to use these constructs, but we might so we - * output the skip just in case. */ - current.println(" - skip:") - current.println(" features: ") - current.println(" - default_shards") - current.println(" - stash_in_key") - current.println(" - stash_in_path") - current.println(" - stash_path_replace") - current.println(" - warnings") - } - if (test.skip) { - if (test.continued) { - throw new InvalidUserDataException("Continued snippets " - + "can't be skipped") - } - current.println(" - always_skip") - current.println(" reason: $test.skip") - } - if (test.setup != null) { - setup(test) - } - - body(test, false) - - if (test.teardown != null) { - teardown(test) - } - } - - private void setup(final Snippet snippet) { - // insert a setup defined outside of the docs - for (final String name : snippet.setup.split(',')) { - final String setup = setups[name] - if (setup == null) { - throw new InvalidUserDataException( - "Couldn't find named setup $name for $snippet" - ) - } - current.println("# Named setup ${name}") - current.println(setup) - } - } - - private void teardown(final Snippet snippet) { - // insert a teardown defined outside of the docs - for (final String name : snippet.teardown.split(',')) { - final String teardown = teardowns[name] - if (teardown == null) { - throw new InvalidUserDataException( - "Couldn't find named teardown $name for $snippet" - ) - } - current.println("# Named teardown ${name}") - current.println(teardown) - } - } - - private void response(Snippet response) { - if (null == response.skip) { - current.println(" - match: ") - current.println(" \$body: ") - replaceBlockQuote(response.contents).eachLine { - current.println(" $it") - } - } - } - - void emitDo(String method, String pathAndQuery, String body, - String catchPart, List warnings, boolean inSetup, boolean skipShardFailures) { - def (String path, String query) = pathAndQuery.tokenize('?') - if (path == null) { - path = '' // Catch requests to the root... - } else { - path = path.replace('<', '%3C').replace('>', '%3E') - } - current.println(" - do:") - if (catchPart != null) { - current.println(" catch: $catchPart") - } - if (false == warnings.isEmpty()) { - current.println(" warnings:") - for (String warning in warnings) { - // Escape " because we're going to quote the warning - String escaped = warning.replaceAll('"', '\\\\"') - /* Quote the warning in case it starts with [ which makes - * it look too much like an array. 
*/ - current.println(" - \"$escaped\"") - } - } - current.println(" raw:") - current.println(" method: $method") - current.println(" path: \"$path\"") - if (query != null) { - for (String param: query.tokenize('&')) { - def (String name, String value) = param.tokenize('=') - if (value == null) { - value = '' - } - current.println(" $name: \"$value\"") - } - } - if (body != null) { - // Throw out the leading newline we get from parsing the body - body = body.substring(1) - // Replace """ quoted strings with valid json ones - body = replaceBlockQuote(body) - current.println(" body: |") - body.eachLine { current.println(" $it") } - } - /* Catch any shard failures. These only cause a non-200 response if - * no shard succeeds. But we need to fail the tests on all of these - * because they mean invalid syntax or broken queries or something - * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to have assertions in the setup - * section so we have to skip it there. We also omit the assertion - * from APIs that don't return a JSON object - */ - if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { - current.println(" - is_false: _shards.failures") - } - } - - private void testSetup(Snippet snippet) { - if (lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet: wasn't first. TESTSETUP can only be used in the first snippet of a document.") - } - setupCurrent(snippet) - current.println('---') - current.println("setup:") - if (snippet.setup != null) { - setup(snippet) - } - body(snippet, true) - } - - private void testTearDown(Snippet snippet) { - if (previousTest != null && previousTest.testSetup == false && lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet must follow test setup or be first") - } - setupCurrent(snippet) - current.println('---') - current.println('teardown:') - body(snippet, true) - } - - private void body(Snippet snippet, boolean inSetup) { - parse("$snippet", snippet.contents, SYNTAX) { matcher, last -> - if (matcher.group("comment") != null) { - // Comment - return - } - String yamlRequest = matcher.group("yaml"); - if (yamlRequest != null) { - current.println(yamlRequest) - return - } - String method = matcher.group("method") - String pathAndQuery = matcher.group("pathAndQuery") - String body = matcher.group("body") - String catchPart = last ? 
snippet.catchPart : null - if (pathAndQuery.startsWith('/')) { - // Leading '/'s break the generated paths - pathAndQuery = pathAndQuery.substring(1) - } - emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, - inSetup, snippet.skipShardsFailures) - } - } - - private PrintWriter setupCurrent(Snippet test) { - if (lastDocsPath == test.path) { - return - } - names.clear() - finishLastTest() - lastDocsPath = test.path - - // Make the destination file: - // Shift the path into the destination directory tree - Path dest = outputRoot().toPath().resolve(test.path) - // Replace the extension - String fileName = dest.getName(dest.nameCount - 1) - dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yml')) - - // Now setup the writer - Files.createDirectories(dest.parent) - current = dest.newPrintWriter('UTF-8') - } - - void finishLastTest() { - if (current != null) { - current.close() - current = null - } - } - - void checkUnconverted() { - List listedButNotFound = [] - for (String listed : expectedUnconvertedCandidates) { - if (false == unconvertedCandidates.remove(listed)) { - listedButNotFound.add(listed) - } - } - String message = "" - if (false == listedButNotFound.isEmpty()) { - Collections.sort(listedButNotFound) - listedButNotFound = listedButNotFound.collect {' ' + it} - message += "Expected unconverted snippets but none found in:\n" - message += listedButNotFound.join("\n") - } - if (false == unconvertedCandidates.isEmpty()) { - List foundButNotListed = - new ArrayList<>(unconvertedCandidates) - Collections.sort(foundButNotListed) - foundButNotListed = foundButNotListed.collect {' ' + it} - if (false == "".equals(message)) { - message += "\n" - } - message += "Unexpected unconverted snippets:\n" - message += foundButNotListed.join("\n") - } - if (false == "".equals(message)) { - throw new InvalidUserDataException(message); - } - } - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy deleted file mode 100644 index 3e4ad91024082..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy +++ /dev/null @@ -1,438 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonToken - -import org.gradle.api.Action; -import org.gradle.api.DefaultTask -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.ConfigurableFileTree -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.TaskAction - -import java.nio.file.Path -import java.util.regex.Matcher - -/** - * A task which will run a closure on each snippet in the documentation. 
- */ -class SnippetsTask extends DefaultTask { - private static final String SCHAR = /(?:\\\/|[^\/])/ - private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\// - private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/ - private static final String SKIP_REGEX = /skip:([^\]]+)/ - private static final String SETUP = /setup:([^ \]]+)/ - private static final String TEARDOWN = /teardown:([^ \]]+)/ - private static final String WARNING = /warning:(.+)/ - private static final String NON_JSON = /(non_json)/ - private static final String TEST_SYNTAX = - /(?:$CATCH|$SUBSTITUTION|$SKIP_REGEX|(continued)|$SETUP|$TEARDOWN|$WARNING|(skip_shard_failures)) ?/ - - /** - * Action to take on each snippet. Called with a single parameter, an - * instance of Snippet. - */ - @Internal - Action perSnippet - - /** - * The docs to scan. Defaults to every file in the directory exception the - * build.gradle file because that is appropriate for Elasticsearch's docs - * directory. - */ - @InputFiles - ConfigurableFileTree docs - - /** - * Substitutions done on every snippet's contents. - */ - @Input - Map defaultSubstitutions = [:] - - @TaskAction - void executeTask() { - /* - * Walks each line of each file, building snippets as it encounters - * the lines that make up the snippet. - */ - for (File file: docs) { - String lastLanguage - String name - int lastLanguageLine - Snippet snippet = null - StringBuilder contents = null - List substitutions = null - Closure emit = { - snippet.contents = contents.toString() - contents = null - Closure doSubstitution = { String pattern, String subst -> - /* - * $body is really common but it looks like a - * backreference so we just escape it here to make the - * tests cleaner. - */ - subst = subst.replace('$body', '\\$body') - subst = subst.replace('$_path', '\\$_path') - // \n is a new line.... - subst = subst.replace('\\n', '\n') - snippet.contents = snippet.contents.replaceAll( - pattern, subst) - } - defaultSubstitutions.each doSubstitution - if (substitutions != null) { - substitutions.each doSubstitution - substitutions = null - } - if (snippet.language == null) { - throw new InvalidUserDataException("$snippet: " - + "Snippet missing a language. This is required by " - + "Elasticsearch's doc testing infrastructure so we " - + "be sure we don't accidentally forget to test a " - + "snippet.") - } - // Try to detect snippets that contain `curl` - if (snippet.language == 'sh' || snippet.language == 'shell') { - snippet.curl = snippet.contents.contains('curl') - if (snippet.console == false && snippet.curl == false) { - throw new InvalidUserDataException("$snippet: " - + "No need for NOTCONSOLE if snippet doesn't " - + "contain `curl`.") - } - } - if (snippet.testResponse - && ('js' == snippet.language || 'console-result' == snippet.language) - && null == snippet.skip) { - String quoted = snippet.contents - // quote values starting with $ - .replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"') - // quote fields starting with $ - .replaceAll(/(\$[^ ,\n}]+)\s*:/, '"$1":') - - JsonFactory jf = new JsonFactory(); - jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,true); - JsonParser jsonParser; - - try { - jsonParser = jf.createParser(quoted); - while(jsonParser.isClosed() == false) { - jsonParser.nextToken(); - } - } catch (JsonParseException e) { - throw new InvalidUserDataException("Invalid json in " - + snippet.toString() + ". 
The error is:\n" + e.getMessage() + ".\n" - + "After substitutions and munging, the json looks like:\n" + quoted, e); - } - } - perSnippet.execute(snippet) - snippet = null - } - file.eachLine('UTF-8') { String line, int lineNumber -> - Matcher matcher - if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet - if (snippet == null) { - Path path = docs.dir.toPath().relativize(file.toPath()) - snippet = new Snippet(path: path, start: lineNumber, name: name) - if (lastLanguageLine == lineNumber - 1) { - snippet.language = lastLanguage - } - name = null - } else { - snippet.end = lineNumber - } - return - } - def source = matchSource(line) - if (source.matches) { - lastLanguage = source.language - lastLanguageLine = lineNumber - name = source.name - return - } - if (line ==~ /\/\/\s*AUTOSENSE\s*/) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "AUTOSENSE has been replaced by CONSOLE.") - } - if (line ==~ /\/\/\s*CONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "CONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = true - return - } - if (line ==~ /\/\/\s*NOTCONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "NOTCONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = false - return - } - matcher = line =~ /\/\/\s*TEST(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TEST not paired with a snippet at ") - } - snippet.test = true - if (matcher.group(2) != null) { - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), TEST_SYNTAX) { - if (it.group(1) != null) { - snippet.catchPart = it.group(1) - return - } - if (it.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - substitutions.add([it.group(2), it.group(3)]) - return - } - if (it.group(4) != null) { - snippet.skip = it.group(4) - return - } - if (it.group(5) != null) { - snippet.continued = true - return - } - if (it.group(6) != null) { - snippet.setup = it.group(6) - return - } - if (it.group(7) != null) { - snippet.teardown = it.group(7) - return - } - if (it.group(8) != null) { - snippet.warnings.add(it.group(8)) - return - } - if (it.group(9) != null) { - snippet.skipShardsFailures = true - return - } - throw new InvalidUserDataException( - "Invalid test marker: $line") - } - } - return - } - matcher = line =~ /\/\/\s*TESTRESPONSE(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TESTRESPONSE not paired with a snippet") - } - snippet.testResponse = true - if (matcher.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$NON_JSON|$SKIP_REGEX) ?/) { - if (it.group(1) != null) { - // TESTRESPONSE[s/adsf/jkl/] - substitutions.add([it.group(1), it.group(2)]) - } else if (it.group(3) != null) { - // TESTRESPONSE[non_json] - substitutions.add(['^', '/']) - substitutions.add(['\n$', '\\\\s*/']) - substitutions.add(['( +)', '$1\\\\s+']) - substitutions.add(['\n', '\\\\s*\n ']) - } else if (it.group(4) != null) { - // 
TESTRESPONSE[skip:reason] - snippet.skip = it.group(4) - } - } - } - return - } - if (line ==~ /\/\/\s*TESTSETUP\s*/) { - snippet.testSetup = true - return - } - if (line ==~ /\/\/\s*TEARDOWN\s*/) { - snippet.testTearDown = true - return - } - if (snippet == null) { - // Outside - return - } - if (snippet.end == Snippet.NOT_FINISHED) { - // Inside - if (contents == null) { - contents = new StringBuilder() - } - // We don't need the annotations - line = line.replaceAll(/<\d+>/, '') - // Nor any trailing spaces - line = line.replaceAll(/\s+$/, '') - contents.append(line).append('\n') - return - } - // Allow line continuations for console snippets within lists - if (snippet != null && line.trim() == '+') { - return - } - // Just finished - emit() - } - if (snippet != null) emit() - } - } - - static Source matchSource(String line) { - def matcher = line =~ /\["?source"?(?:\.[^,]+)?,\s*"?([-\w]+)"?(,((?!id=).)*(id="?([-\w]+)"?)?(.*))?].*/ - if(matcher.matches()){ - return new Source(matches: true, language: matcher.group(1), name: matcher.group(5)) - } - return new Source(matches: false) - } - - static class Source { - boolean matches - String language - String name - } - - static class Snippet { - static final int NOT_FINISHED = -1 - - /** - * Path to the file containing this snippet. Relative to docs.dir of the - * SnippetsTask that created it. - */ - Path path - int start - int end = NOT_FINISHED - String contents - - Boolean console = null - boolean test = false - boolean testResponse = false - boolean testSetup = false - boolean testTearDown = false - String skip = null - boolean continued = false - String language = null - String catchPart = null - String setup = null - String teardown = null - boolean curl - List warnings = new ArrayList() - boolean skipShardsFailures = false - String name - - @Override - public String toString() { - String result = "$path[$start:$end]" - if (language != null) { - result += "($language)" - } - if (console != null) { - result += console ? '// CONSOLE' : '// NOTCONSOLE' - } - if (test) { - result += '// TEST' - if (catchPart) { - result += "[catch: $catchPart]" - } - if (skip) { - result += "[skip=$skip]" - } - if (continued) { - result += '[continued]' - } - if (setup) { - result += "[setup:$setup]" - } - if (teardown) { - result += "[teardown:$teardown]" - } - for (String warning in warnings) { - result += "[warning:$warning]" - } - if (skipShardsFailures) { - result += '[skip_shard_failures]' - } - } - if (testResponse) { - result += '// TESTRESPONSE' - if (skip) { - result += "[skip=$skip]" - } - } - if (testSetup) { - result += '// TESTSETUP' - } - if (curl) { - result += '(curl)' - } - return result - } - } - - /** - * Repeatedly match the pattern to the string, calling the closure with the - * matchers each time there is a match. If there are characters that don't - * match then blow up. If the closure takes two parameters then the second - * one is "is this the last match?". - */ - protected parse(String location, String s, String pattern, Closure c) { - if (s == null) { - return // Silly null, only real stuff gets to match! 
-        }
-        Matcher m = s =~ pattern
-        int offset = 0
-        Closure extraContent = { message ->
-            StringBuilder cutOut = new StringBuilder()
-            cutOut.append(s[offset - 6..offset - 1])
-            cutOut.append('*')
-            cutOut.append(s[offset..Math.min(offset + 5, s.length() - 1)])
-            String cutOutNoNl = cutOut.toString().replace('\n', '\\n')
-            throw new InvalidUserDataException("$location: Extra content "
-                + "$message ('$cutOutNoNl') matching [$pattern]: $s")
-        }
-        while (m.find()) {
-            if (m.start() != offset) {
-                extraContent("between [$offset] and [${m.start()}]")
-            }
-            offset = m.end()
-            if (c.maximumNumberOfParameters == 1) {
-                c(m)
-            } else {
-                c(m, offset == s.length())
-            }
-        }
-        if (offset == 0) {
-            throw new InvalidUserDataException("$location: Didn't match "
-                + "$pattern: $s")
-        }
-        if (offset != s.length()) {
-            extraContent("after [$offset]")
-        }
-    }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java
new file mode 100644
index 0000000000000..7b35fd29fbd1a
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.gradle.api.InvalidUserDataException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class AsciidocSnippetParser implements SnippetParser {
+    public static final Pattern SNIPPET_PATTERN = Pattern.compile("-{4,}\\s*");
+
+    private static final String CATCH = "catch:\\s*((?:\\/[^\\/]+\\/)|[^ \\]]+)";
+    private static final String SKIP_REGEX = "skip:([^\\]]+)";
+    private static final String SETUP = "setup:([^ \\]]+)";
+    private static final String TEARDOWN = "teardown:([^ \\]]+)";
+    private static final String WARNING = "warning:(.+)";
+    private static final String NON_JSON = "(non_json)";
+    private static final String SCHAR = "(?:\\\\\\/|[^\\/])";
+    private static final String SUBSTITUTION = "s\\/(" + SCHAR + "+)\\/(" + SCHAR + "*)\\/";
+    private static final String TEST_SYNTAX = "(?:"
+        + CATCH
+        + "|"
+        + SUBSTITUTION
+        + "|"
+        + SKIP_REGEX
+        + "|(continued)|"
+        + SETUP
+        + "|"
+        + TEARDOWN
+        + "|"
+        + WARNING
+        + "|(skip_shard_failures)) ?";
+
+    private final Map<String, String> defaultSubstitutions;
+
+    public AsciidocSnippetParser(Map<String, String> defaultSubstitutions) {
+        this.defaultSubstitutions = defaultSubstitutions;
+    }
+
+    @Override
+    public List<Snippet> parseDoc(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions) {
+        String lastLanguage = null;
+        Snippet snippet = null;
+        String name = null;
+        int lastLanguageLine = 0;
+        StringBuilder contents = null;
+        List<Snippet> snippets = new ArrayList<>();
+
+        try (Stream<String> lines = Files.lines(docFile.toPath(), StandardCharsets.UTF_8)) {
+            List<String> linesList = lines.collect(Collectors.toList());
+            for (int lineNumber = 0; lineNumber < linesList.size(); lineNumber++) {
+                String line = linesList.get(lineNumber);
+                if (SNIPPET_PATTERN.matcher(line).matches()) {
+                    if (snippet == null) {
+                        Path path = rootDir.toPath().relativize(docFile.toPath());
+                        snippet = new Snippet(path, lineNumber + 1, name);
+                        snippets.add(snippet);
+                        if (lastLanguageLine == lineNumber - 1) {
+                            snippet.language = lastLanguage;
+                        }
+                        name = null;
+                    } else {
+                        snippet.end = lineNumber + 1;
+                    }
+                    continue;
+                }
+
+                Source source = matchSource(line);
+                if (source.matches) {
+                    lastLanguage = source.language;
+                    lastLanguageLine = lineNumber;
+                    name = source.name;
+                    continue;
+                }
+                if (consoleHandled(docFile.getName(), lineNumber, line, snippet)) {
+                    continue;
+                }
+                if (testHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) {
+                    continue;
+                }
+                if (testResponseHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) {
+                    continue;
+                }
+                if (line.matches("\\/\\/\s*TESTSETUP\s*")) {
+                    snippet.testSetup = true;
+                    continue;
+                }
+                if (line.matches("\\/\\/\s*TEARDOWN\s*")) {
+                    snippet.testTearDown = true;
+                    continue;
+                }
+                if (snippet == null) {
+                    // Outside
+                    continue;
+                }
+                if (snippet.end == Snippet.NOT_FINISHED) {
+                    // Inside
+                    if (contents == null) {
+                        contents = new StringBuilder();
+                    }
+                    // We don't need the annotations
+                    line = line.replaceAll("<\\d+>", "");
+                    // Nor any trailing spaces
+                    line = line.replaceAll("\s+$", "");
+                    contents.append(line).append("\n");
+                    continue;
+                }
+                // Allow line continuations for console snippets within lists
+                if (snippet != null && line.trim().equals("+")) {
+                    continue;
+                }
+                finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions);
+                substitutions = new ArrayList<>();
+                snippet = null;
+                contents = null;
+            }
+            if (snippet != null) {
+                finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions);
+                contents = null;
+                snippet = null;
+                substitutions = new ArrayList<>();
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        return snippets;
+    }
+
+    static Snippet finalizeSnippet(
+        final Snippet snippet,
+        String contents,
+        Map<String, String> defaultSubstitutions,
+        Collection<Map.Entry<String, String>> substitutions
+    ) {
+        snippet.contents = contents.toString();
+        snippet.validate();
+        escapeSubstitutions(snippet, defaultSubstitutions, substitutions);
+        return snippet;
+    }
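As an aside for readers following the parsing loop above, here is a minimal, hypothetical driver showing what parseDoc extracts from a typical asciidoc source block. The file paths and the doc content in the comment are invented for illustration, and Snippet's fields are package-private, so a real caller would live in the same package:

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    class ParseDocDemo {
        public static void main(String[] args) {
            // Given docs/demo.asciidoc containing:
            //
            //   [source,console]
            //   ----
            //   GET /_search
            //   ----
            //   // TEST[skip:demo only]
            //
            // parseDoc returns one Snippet whose language is "console", whose
            // contents are "GET /_search\n", and whose skip reason is "demo only".
            File rootDir = new File("docs");
            File docFile = new File("docs/demo.asciidoc");
            List<Snippet> snippets = new AsciidocSnippetParser(Map.of()).parseDoc(rootDir, docFile, new ArrayList<>());
            snippets.forEach(System.out::println);
        }
    }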
+    private static void escapeSubstitutions(
+        Snippet snippet,
+        Map<String, String> defaultSubstitutions,
+        Collection<Map.Entry<String, String>> substitutions
+    ) {
+        BiConsumer<String, String> doSubstitution = (pattern, subst) -> {
+            /*
+             * $body is really common but it looks like a
+             * backreference so we just escape it here to make the
+             * tests cleaner.
+             */
+            subst = subst.replace("$body", "\\$body");
+            subst = subst.replace("$_path", "\\$_path");
+            subst = subst.replace("\\n", "\n");
+            snippet.contents = snippet.contents.replaceAll(pattern, subst);
+        };
+        defaultSubstitutions.forEach(doSubstitution);
+
+        if (substitutions != null) {
+            substitutions.forEach(e -> doSubstitution.accept(e.getKey(), e.getValue()));
+        }
+    }
+
+    private boolean testResponseHandled(
+        String name,
+        int lineNumber,
+        String line,
+        Snippet snippet,
+        final List<Map.Entry<String, String>> substitutions
+    ) {
+        Matcher matcher = Pattern.compile("\\/\\/\s*TESTRESPONSE(\\[(.+)\\])?\s*").matcher(line);
+        if (matcher.matches()) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(name + ":" + lineNumber + ": TESTRESPONSE not paired with a snippet at ");
+            }
+            snippet.testResponse = true;
+            if (matcher.group(2) != null) {
+                String loc = name + ":" + lineNumber;
+                ParsingUtils.parse(
+                    loc,
+                    matcher.group(2),
+                    "(?:" + SUBSTITUTION + "|" + NON_JSON + "|" + SKIP_REGEX + ") ?",
+                    (Matcher m, Boolean last) -> {
+                        if (m.group(1) != null) {
+                            // TESTRESPONSE[s/adsf/jkl/]
+                            substitutions.add(Map.entry(m.group(1), m.group(2)));
+                        } else if (m.group(3) != null) {
+                            // TESTRESPONSE[non_json]
+                            substitutions.add(Map.entry("^", "/"));
+                            substitutions.add(Map.entry("\n$", "\\\\s*/"));
+                            substitutions.add(Map.entry("( +)", "$1\\\\s+"));
+                            substitutions.add(Map.entry("\n", "\\\\s*\n "));
+                        } else if (m.group(4) != null) {
+                            // TESTRESPONSE[skip:reason]
+                            snippet.skip = m.group(4);
+                        }
+                    }
+                );
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private boolean testHandled(String name, int lineNumber, String line, Snippet snippet, List<Map.Entry<String, String>> substitutions) {
+        Matcher matcher = Pattern.compile("\\/\\/\s*TEST(\\[(.+)\\])?\s*").matcher(line);
+        if (matcher.matches()) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(name + ":" + lineNumber + ": TEST not paired with a snippet at ");
+            }
+            snippet.test = true;
+            if (matcher.group(2) != null) {
+                String loc = name + ":" + lineNumber;
+                ParsingUtils.parse(loc, matcher.group(2), TEST_SYNTAX, (Matcher m, Boolean last) -> {
+                    if (m.group(1) != null) {
+                        snippet.catchPart = m.group(1);
+                        return;
+                    }
+                    if (m.group(2) != null) {
+                        substitutions.add(Map.entry(m.group(2), m.group(3)));
+                        return;
+                    }
+                    if (m.group(4) != null) {
+                        snippet.skip = m.group(4);
+                        return;
+                    }
+                    if (m.group(5) != null) {
+                        snippet.continued = true;
+                        return;
+                    }
+                    if (m.group(6) != null) {
+                        snippet.setup = m.group(6);
+                        return;
+                    }
+                    if (m.group(7) != null) {
+                        snippet.teardown = m.group(7);
+                        return;
+                    }
+                    if (m.group(8) != null) {
+                        snippet.warnings.add(m.group(8));
+                        return;
+                    }
+                    if (m.group(9) != null) {
+                        snippet.skipShardsFailures = true;
+                        return;
+                    }
+                    throw new InvalidUserDataException("Invalid test marker: " + line);
+                });
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private boolean consoleHandled(String fileName, int lineNumber, String line, Snippet snippet) {
+        if (line.matches("\\/\\/\s*CONSOLE\s*")) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": CONSOLE not paired with a snippet");
+            }
+            if (snippet.console != null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE");
+            }
+            snippet.console = true;
+            return true;
+        } else if (line.matches("\\/\\/\s*NOTCONSOLE\s*")) {
+            if (snippet == null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": NOTCONSOLE not paired with a snippet");
+            }
+            if (snippet.console != null) {
+                throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE");
+            }
+            snippet.console = false;
+            return true;
+        }
+        return false;
+    }
+
+    static Source matchSource(String line) {
+        Pattern pattern = Pattern.compile("\\[\"?source\"?(?:\\.[^,]+)?,\\s*\"?([-\\w]+)\"?(,((?!id=).)*(id=\"?([-\\w]+)\"?)?(.*))?].*");
+        Matcher matcher = pattern.matcher(line);
+        if (matcher.matches()) {
+            return new Source(true, matcher.group(1), matcher.group(5));
+        }
+        return new Source(false, null, null);
+    }
+}
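A quick sketch of how the source-line regex in matchSource classifies typical asciidoc attribute lines. This is illustrative only: matchSource is package-private, and the Source type's accessors are defined elsewhere in this change, so the demo just prints the objects:

    class MatchSourceDemo {
        public static void main(String[] args) {
            // matches; language is "console", no explicit id
            Source plain = AsciidocSnippetParser.matchSource("[source,console]");
            // matches; language is "js", snippet name is "my-snippet"
            Source named = AsciidocSnippetParser.matchSource("[source,js,id=\"my-snippet\"]");
            // does not match ordinary prose
            Source prose = AsciidocSnippetParser.matchSource("Some other line");
            System.out.println(plain + " / " + named + " / " + prose);
        }
    }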
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java
new file mode 100644
index 0000000000000..87f0621d53fba
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.gradle.api.Action;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.InvalidUserDataException;
+import org.gradle.api.file.ConfigurableFileTree;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public abstract class DocSnippetTask extends DefaultTask {
+
+    /**
+     * Action to take on each snippet. Called with a single parameter, an
+     * instance of Snippet.
+     */
+    private Action<Snippet> perSnippet;
+
+    /**
+     * The docs to scan. Defaults to every file in the directory except the
+     * build.gradle file because that is appropriate for Elasticsearch's docs
+     * directory.
+     */
+    private ConfigurableFileTree docs;
+    private Map<String, String> defaultSubstitutions = new HashMap<>();
+
+    @InputFiles
+    public ConfigurableFileTree getDocs() {
+        return docs;
+    }
+
+    public void setDocs(ConfigurableFileTree docs) {
+        this.docs = docs;
+    }
+
+    /**
+     * Substitutions done on every snippet's contents.
+     */
+    @Input
+    public Map<String, String> getDefaultSubstitutions() {
+        return defaultSubstitutions;
+    }
+
+    @TaskAction
+    void executeTask() {
+        for (File file : docs) {
+            List<Snippet> snippets = parseDocFile(docs.getDir(), file, new ArrayList<>());
+            if (perSnippet != null) {
+                snippets.forEach(perSnippet::execute);
+            }
+        }
+    }
+
+    List<Snippet> parseDocFile(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions) {
+        SnippetParser parser = parserForFileType(docFile);
+        return parser.parseDoc(rootDir, docFile, substitutions);
+    }
+
+    private SnippetParser parserForFileType(File docFile) {
+        if (docFile.getName().endsWith(".asciidoc")) {
+            return new AsciidocSnippetParser(defaultSubstitutions);
+        }
+        throw new InvalidUserDataException("Unsupported file type: " + docFile.getName());
+    }
+
+    public void setDefaultSubstitutions(Map<String, String> defaultSubstitutions) {
+        this.defaultSubstitutions = defaultSubstitutions;
+    }
+
+    public void setPerSnippet(Action<Snippet> perSnippet) {
+        this.perSnippet = perSnippet;
+    }
+
+}
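To make the task's surface concrete, here is a hypothetical registration mirroring what DocsTestPlugin does below. The task name, docs directory, and version value are illustrative, not part of the patch:

    import org.gradle.api.Plugin;
    import org.gradle.api.Project;

    import java.util.Map;

    class DocSnippetDemoPlugin implements Plugin<Project> {
        @Override
        public void apply(Project project) {
            project.getTasks().register("printDocSnippets", DocSnippetTask.class, task -> {
                // Scan only asciidoc files under docs/ (illustrative path)
                task.setDocs(project.fileTree("docs", tree -> tree.include("**/*.asciidoc")));
                // Resolve {version} to a made-up version for the example
                task.setDefaultSubstitutions(Map.of("\\{version\\}", "8.14.0"));
                task.setPerSnippet(snippet -> System.out.println(snippet));
            });
        }
    }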
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java
new file mode 100644
index 0000000000000..bbb5102dd6699
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.elasticsearch.gradle.OS;
+import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.VersionProperties;
+import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
+import org.elasticsearch.gradle.testclusters.TestDistribution;
+import org.gradle.api.NamedDomainObjectContainer;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.file.Directory;
+import org.gradle.api.file.ProjectLayout;
+import org.gradle.api.internal.file.FileOperations;
+import org.gradle.api.plugins.JavaPluginExtension;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.tasks.TaskProvider;
+
+import java.util.Map;
+
+import javax.inject.Inject;
+
+public class DocsTestPlugin implements Plugin<Project> {
+    private FileOperations fileOperations;
+    private ProjectLayout projectLayout;
+
+    @Inject
+    DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) {
+        this.projectLayout = projectLayout;
+        this.fileOperations = fileOperations;
+    }
+
+    @Override
+    public void apply(Project project) {
+        project.getPluginManager().apply("elasticsearch.legacy-yaml-rest-test");
+
+        String distribution = System.getProperty("tests.distribution", "default");
+        // The distribution can be configured with -Dtests.distribution on the command line
+        NamedDomainObjectContainer<ElasticsearchCluster> testClusters = (NamedDomainObjectContainer<ElasticsearchCluster>) project
+            .getExtensions()
+            .getByName(TestClustersPlugin.EXTENSION_NAME);
+
+        testClusters.matching((c) -> c.getName().equals("yamlRestTest")).configureEach(c -> {
+            c.setTestDistribution(TestDistribution.valueOf(distribution.toUpperCase()));
+            c.setNameCustomization((name) -> name.replace("yamlRestTest", "node"));
+        });
+
+        project.getTasks().named("assemble").configure(task -> { task.setEnabled(false); });
+
+        Map<String, String> commonDefaultSubstitutions = Map.of(
+            /* These match up with the asciidoc syntax for substitutions but
+             * the values may differ. In particular {version} needs to resolve
+             * to the version being built for testing but needs to resolve to
+             * the last released version for docs. */
+            "\\{version\\}",
+            Version.fromString(VersionProperties.getElasticsearch()).toString(),
+            "\\{version_qualified\\}",
+            VersionProperties.getElasticsearch(),
+            "\\{lucene_version\\}",
+            VersionProperties.getLucene().replaceAll("-snapshot-\\w+$", ""),
+            "\\{build_flavor\\}",
+            distribution,
+            "\\{build_type\\}",
+            OS.conditionalString().onWindows(() -> "zip").onUnix(() -> "tar").supply()
+        );
+
+        project.getTasks().register("listSnippets", DocSnippetTask.class, task -> {
+            task.setGroup("Docs");
+            task.setDescription("List each snippet");
+            task.setDefaultSubstitutions(commonDefaultSubstitutions);
+            task.setPerSnippet(snippet -> System.out.println(snippet));
+        });
+
+        project.getTasks().register("listConsoleCandidates", DocSnippetTask.class, task -> {
+            task.setGroup("Docs");
+            task.setDescription("List snippets that probably should be marked // CONSOLE");
+            task.setDefaultSubstitutions(commonDefaultSubstitutions);
+            task.setPerSnippet(snippet -> {
+                if (snippet.isConsoleCandidate()) {
+                    System.out.println(snippet);
+                }
+            });
+        });
+
+        Provider<Directory> restRootDir = projectLayout.getBuildDirectory().dir("rest");
+        TaskProvider<RestTestsFromDocSnippetTask> buildRestTests = project.getTasks()
+            .register("buildRestTests", RestTestsFromDocSnippetTask.class, task -> {
+                task.setDefaultSubstitutions(commonDefaultSubstitutions);
+                task.getTestRoot().convention(restRootDir);
+                task.doFirst(task1 -> fileOperations.delete(restRootDir.get()));
+            });
+
+        // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm
+        JavaPluginExtension byType = project.getExtensions().getByType(JavaPluginExtension.class);
+        byType.getSourceSets().getByName("yamlRestTest").getOutput().dir(Map.of("builtBy", buildRestTests), restRootDir);
+    }
+
+}
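For clarity, a small self-contained sketch of how one of the default substitutions above rewrites snippet contents (the real machinery is escapeSubstitutions in AsciidocSnippetParser; the version string here is invented):

    import java.util.Map;

    class SubstitutionDemo {
        public static void main(String[] args) {
            Map<String, String> subs = Map.of("\\{version\\}", "8.14.0");
            String contents = "PUT /_snapshot/repo?elasticsearch_version={version}";
            for (Map.Entry<String, String> e : subs.entrySet()) {
                contents = contents.replaceAll(e.getKey(), e.getValue());
            }
            System.out.println(contents); // ...elasticsearch_version=8.14.0
        }
    }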
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java
new file mode 100644
index 0000000000000..b17dd4c7e21d3
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import org.gradle.api.InvalidUserDataException;
+
+import java.util.function.BiConsumer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ParsingUtils {
+
+    static void extraContent(String message, String content, int offset, String location, String pattern) {
+        StringBuilder cutOut = new StringBuilder();
+        cutOut.append(content.substring(Math.max(0, offset - 6), offset));
+        cutOut.append('*');
+        cutOut.append(content.substring(offset, Math.min(offset + 5, content.length())));
+        String cutOutNoNl = cutOut.toString().replace("\n", "\\n");
+        throw new InvalidUserDataException(
+            location + ": Extra content " + message + " ('" + cutOutNoNl + "') matching [" + pattern + "]: " + content
+        );
+    }
+
+    /**
+     * Repeatedly match the pattern to the string, calling the handler with
+     * the matcher each time there is a match. If there are characters that
+     * don't match then blow up. The second handler argument is "is this the
+     * last match?".
+     */
+    static void parse(String location, String content, String pattern, BiConsumer<Matcher, Boolean> testHandler) {
+        if (content == null) {
+            return; // Silly null, only real stuff gets to match!
+        }
+        Matcher m = Pattern.compile(pattern).matcher(content);
+        int offset = 0;
+        while (m.find()) {
+            if (m.start() != offset) {
+                extraContent("between [" + offset + "] and [" + m.start() + "]", content, offset, location, pattern);
+            }
+            offset = m.end();
+            testHandler.accept(m, offset == content.length());
+        }
+        if (offset == 0) {
+            throw new InvalidUserDataException(location + ": Didn't match " + pattern + ": " + content);
+        }
+        if (offset != content.length()) {
+            extraContent("after [" + offset + "]", content, offset, location, pattern);
+        }
+    }
+
+}
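A hedged usage sketch for the helper above (it is package-private, so imagine this in the same package). The marker grammar here is a cut-down, made-up subset of TEST_SYNTAX:

    class ParseDemo {
        public static void main(String[] args) {
            // Two markers back to back: a substitution and a skip, as they
            // might appear inside // TEST[...]
            ParsingUtils.parse(
                "demo.asciidoc:12",
                "s/foo/bar/ skip:demo reason",
                "(?:s\\/([^\\/]+)\\/([^\\/]*)\\/|skip:([^\\]]+)) ?",
                (matcher, last) -> {
                    if (matcher.group(1) != null) {
                        System.out.println("substitute " + matcher.group(1) + " -> " + matcher.group(2));
                    } else {
                        System.out.println("skip: " + matcher.group(3) + (last ? " (last marker)" : ""));
                    }
                }
            );
        }
    }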
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import groovy.transform.PackageScope; + +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.internal.file.FileOperations; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.OutputDirectory; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +public abstract class RestTestsFromDocSnippetTask extends DocSnippetTask { + + private Map setups = new HashMap<>(); + + private Map teardowns = new HashMap(); + + /** + * Test setups defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map getSetups() { + return setups; + } + + public void setSetups(Map setups) { + this.setups = setups; + } + + /** + * Test teardowns defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map getTeardowns() { + return teardowns; + } + + public void setTeardowns(Map teardowns) { + this.teardowns = teardowns; + } + + /** + * A list of files that contain snippets that *probably* should be + * converted to `// CONSOLE` but have yet to be converted. If a file is in + * this list and doesn't contain unconverted snippets this task will fail. + * If there are unconverted snippets not in this list then this task will + * fail. All files are paths relative to the docs dir. + */ + private List expectedUnconvertedCandidates; + + @Input + public List getExpectedUnconvertedCandidates() { + return expectedUnconvertedCandidates; + } + + public void setExpectedUnconvertedCandidates(List expectedUnconvertedCandidates) { + this.expectedUnconvertedCandidates = expectedUnconvertedCandidates; + } + + /** + * Root directory of the tests being generated. To make rest tests happy + * we generate them in a testRoot which is contained in this directory. + */ + private DirectoryProperty testRoot; + + private Set names = new HashSet<>(); + + @Internal + public Set getNames() { + return names; + } + + public void setNames(Set names) { + this.names = names; + } + + @Inject + public abstract FileOperations getFileOperations(); + + /** + * Root directory containing all the files generated by this task. It is + * contained within testRoot. + */ + @OutputDirectory + File getOutputRoot() { + return new File(testRoot.get().getAsFile(), "/rest-api-spec/test"); + } + + @OutputDirectory + DirectoryProperty getTestRoot() { + return testRoot; + } + + @Inject + public RestTestsFromDocSnippetTask(ObjectFactory objectFactory) { + testRoot = objectFactory.directoryProperty(); + TestBuilder builder = new TestBuilder(); + + setPerSnippet(snippet -> builder.handleSnippet(snippet)); + doLast(task -> { + builder.finishLastTest(); + builder.checkUnconverted(); + }); + } + + /** + * Certain requests should not have the shard failure check because the + * format of the response is incompatible i.e. it is not a JSON object. 
+
+    /**
+     * Certain requests should not have the shard failure check because the
+     * format of the response is incompatible i.e. it is not a JSON object.
+     */
+    static boolean shouldAddShardFailureCheck(String path) {
+        return path.startsWith("_cat") == false && path.startsWith("_ml/datafeeds/") == false;
+    }
+
+    /**
+     * Converts Kibana's block quoted strings into standard JSON. These
+     * {@code """} delimited strings can be embedded in CONSOLE and can
+     * contain newlines and {@code "} without the normal JSON escaping.
+     * This has to add it.
+     */
+    static String replaceBlockQuote(String body) {
+        int start = body.indexOf("\"\"\"");
+        if (start < 0) {
+            return body;
+        }
+        /*
+         * 1.3 is a fairly wild guess of the extra space needed to hold
+         * the escaped string.
+         */
+        StringBuilder result = new StringBuilder((int) (body.length() * 1.3));
+        int startOfNormal = 0;
+        while (start >= 0) {
+            int end = body.indexOf("\"\"\"", start + 3);
+            if (end < 0) {
+                throw new InvalidUserDataException("Invalid block quote starting at " + start + " in:\n" + body);
+            }
+            result.append(body.substring(startOfNormal, start));
+            result.append('"');
+            result.append(body.substring(start + 3, end).replace("\"", "\\\"").replace("\n", "\\n"));
+            result.append('"');
+            startOfNormal = end + 3;
+            start = body.indexOf("\"\"\"", startOfNormal);
+        }
+        result.append(body.substring(startOfNormal));
+        return result.toString();
+    }
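To see what the conversion does, a tiny illustrative driver (assuming same-package access to the static helper):

    class BlockQuoteDemo {
        public static void main(String[] args) {
            String body = "{\n  \"query\": \"\"\"hello \"world\"\n\"\"\"\n}";
            // Prints {  "query": "hello \"world\"\n" } with the inner quotes
            // escaped and the embedded newline turned into a literal \n
            System.out.println(RestTestsFromDocSnippetTask.replaceBlockQuote(body));
        }
    }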
+
+    private class TestBuilder {
+        /**
+         * These languages aren't supported by the syntax highlighter so we
+         * shouldn't use them.
+         */
+        private static final List<String> BAD_LANGUAGES = List.of("json", "javascript");
+
+        String method = "(?<method>GET|PUT|POST|HEAD|OPTIONS|DELETE)";
+        String pathAndQuery = "(?<pathAndQuery>[^\\n]+)";
+
+        String badBody = "GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#";
+        String body = "(?<body>(?:\\n(?!" + badBody + ")[^\\n]+)+)";
+
+        String rawRequest = "(?:" + method + "\\s+" + pathAndQuery + body + "?)";
+
+        String yamlRequest = "(?:startyaml(?s)(?<yaml>.+?)(?-s)endyaml)";
+        String nonComment = "(?:" + rawRequest + "|" + yamlRequest + ")";
+        String comment = "(?<comment>#.+)";
+
+        String SYNTAX = "(?:" + comment + "|" + nonComment + ")\\n+";
+
+        /**
+         * Files containing all snippets that *probably* should be converted
+         * to `// CONSOLE` but have yet to be converted. All files are paths
+         * relative to the docs dir.
+         */
+        private Set<String> unconvertedCandidates = new HashSet<>();
+
+        /**
+         * The last non-TESTRESPONSE snippet.
+         */
+        Snippet previousTest;
+
+        /**
+         * The file in which we saw the last snippet that made a test.
+         */
+        Path lastDocsPath;
+
+        /**
+         * The file we're building.
+         */
+        PrintWriter current;
+
+        Set<String> names = new HashSet<>();
+
+        /**
+         * Called each time a snippet is encountered. Tracks the snippets and
+         * calls buildTest to actually build the test.
+         */
+        public void handleSnippet(Snippet snippet) {
+            if (snippet.isConsoleCandidate()) {
+                unconvertedCandidates.add(snippet.path.toString().replace('\\', '/'));
+            }
+            if (BAD_LANGUAGES.contains(snippet.language)) {
+                throw new InvalidUserDataException(snippet + ": Use `js` instead of `" + snippet.language + "`.");
+            }
+            if (snippet.testSetup) {
+                testSetup(snippet);
+                previousTest = snippet;
+                return;
+            }
+            if (snippet.testTearDown) {
+                testTearDown(snippet);
+                previousTest = snippet;
+                return;
+            }
+            if (snippet.testResponse || "console-result".equals(snippet.language)) {
+                if (previousTest == null) {
+                    throw new InvalidUserDataException(snippet + ": No paired previous test");
+                }
+                if (previousTest.path.equals(snippet.path) == false) {
+                    throw new InvalidUserDataException(snippet + ": Result can't be first in file");
+                }
+                response(snippet);
+                return;
+            }
+            if (("js".equals(snippet.language)) && snippet.console != null && snippet.console) {
+                throw new InvalidUserDataException(snippet + ": Use `[source,console]` instead of `// CONSOLE`.");
+            }
+            if (snippet.test || "console".equals(snippet.language)) {
+                test(snippet);
+                previousTest = snippet;
+                return;
+            }
+            // Must be an unmarked snippet....
+        }
+
+        private void test(Snippet test) {
+            setupCurrent(test);
+
+            if (test.continued) {
+                /* Catch some difficult to debug errors with // TEST[continued]
+                 * and throw a helpful error message. */
+                if (previousTest == null || previousTest.path.equals(test.path) == false) {
+                    throw new InvalidUserDataException("// TEST[continued] " + "cannot be on first snippet in a file: " + test);
+                }
+                if (previousTest != null && previousTest.testSetup) {
+                    throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TESTSETUP: " + test);
+                }
+                if (previousTest != null && previousTest.testTearDown) {
+                    throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TEARDOWN: " + test);
+                }
+            } else {
+                current.println("---");
+                if (test.name != null && test.name.isBlank() == false) {
+                    if (names.add(test.name) == false) {
+                        throw new InvalidUserDataException("Duplicated snippet name '" + test.name + "': " + test);
+                    }
+                    current.println("\"" + test.name + "\":");
+                } else {
+                    current.println("\"line_" + test.start + "\":");
+                }
+                /* The Elasticsearch test runner doesn't support quite a few
+                 * constructs unless we output this skip. We don't know if
+                 * we're going to use these constructs, but we might so we
+                 * output the skip just in case. */
+                current.println("  - skip:");
+                current.println("      features:");
+                current.println("        - default_shards");
+                current.println("        - stash_in_key");
+                current.println("        - stash_in_path");
+                current.println("        - stash_path_replace");
+                current.println("        - warnings");
+            }
+            if (test.skip != null) {
+                if (test.continued) {
+                    throw new InvalidUserDataException("Continued snippets " + "can't be skipped");
+                }
+                current.println("  - always_skip");
+                current.println("    reason: " + test.skip);
+            }
+            if (test.setup != null) {
+                setup(test);
+            }
+
+            body(test, false);
+
+            if (test.teardown != null) {
+                teardown(test);
+            }
+        }
+
+        private void response(Snippet response) {
+            if (null == response.skip) {
+                current.println("  - match:");
+                current.println("      $body:");
+                replaceBlockQuote(response.contents).lines().forEach(line -> current.println("        " + line));
+            }
+        }
+
+        private void teardown(final Snippet snippet) {
+            // insert a teardown defined outside of the docs
+            for (final String name : snippet.teardown.split(",")) {
+                final String teardown = teardowns.get(name);
+                if (teardown == null) {
+                    throw new InvalidUserDataException("Couldn't find named teardown " + name + " for " + snippet);
+                }
+                current.println("# Named teardown " + name);
+                current.println(teardown);
+            }
+        }
+
+        private void testTearDown(Snippet snippet) {
+            if (previousTest != null && previousTest.testSetup == false && snippet.path.equals(lastDocsPath)) {
+                throw new InvalidUserDataException(snippet + " must follow test setup or be first");
+            }
+            setupCurrent(snippet);
+            current.println("---");
+            current.println("teardown:");
+            body(snippet, true);
+        }
+
+        void emitDo(
+            String method,
+            String pathAndQuery,
+            String body,
+            String catchPart,
+            List<String> warnings,
+            boolean inSetup,
+            boolean skipShardFailures
+        ) {
+            String[] tokenized = pathAndQuery.split("\\?");
+            String path = tokenized[0];
+            String query = tokenized.length > 1 ? tokenized[1] : null;
+            if (path == null) {
+                path = ""; // Catch requests to the root...
+            } else {
+                path = path.replace("<", "%3C").replace(">", "%3E");
+            }
+            current.println("  - do:");
+            if (catchPart != null) {
+                current.println("      catch: " + catchPart);
+            }
+            if (false == warnings.isEmpty()) {
+                current.println("      warnings:");
+                for (String warning : warnings) {
+                    // Escape " because we're going to quote the warning
+                    String escaped = warning.replaceAll("\"", "\\\\\"");
+                    /* Quote the warning in case it starts with [ which makes
+                     * it look too much like an array. */
+                    current.println("         - \"" + escaped + "\"");
+                }
+            }
+            current.println("      raw:");
+            current.println("        method: " + method);
+            current.println("        path: \"" + path + "\"");
+            if (query != null) {
+                for (String param : query.split("&")) {
+                    String[] tokenizedQuery = param.split("=");
+                    String paramName = tokenizedQuery[0];
+                    String paramValue = tokenizedQuery.length > 1 ? tokenizedQuery[1] : null;
+                    if (paramValue == null) {
+                        paramValue = "";
+                    }
+                    current.println("        " + paramName + ": \"" + paramValue + "\"");
+                }
+            }
+            if (body != null) {
+                // Throw out the leading newline we get from parsing the body
+                body = body.substring(1);
+                // Replace """ quoted strings with valid json ones
+                body = replaceBlockQuote(body);
+                current.println("        body: |");
+                body.lines().forEach(line -> current.println("          " + line));
+            }
+            /* Catch any shard failures. These only cause a non-200 response if
+             * no shard succeeds. But we need to fail the tests on all of these
+             * because they mean invalid syntax or broken queries or something
+             * else that we don't want to teach people to do. The REST test
+             * framework doesn't allow us to have assertions in the setup
+             * section so we have to skip it there. We also omit the assertion
+             * from APIs that don't return a JSON object
+             */
+            if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) {
+                current.println("  - is_false: _shards.failures");
+            }
+        }
+
+        private void body(Snippet snippet, boolean inSetup) {
+            ParsingUtils.parse(snippet.getLocation(), snippet.contents, SYNTAX, (matcher, last) -> {
+                if (matcher.group("comment") != null) {
+                    // Comment
+                    return;
+                }
+                String yamlRequest = matcher.group("yaml");
+                if (yamlRequest != null) {
+                    current.println(yamlRequest);
+                    return;
+                }
+                String method = matcher.group("method");
+                String pathAndQuery = matcher.group("pathAndQuery");
+                String body = matcher.group("body");
+                String catchPart = last ? snippet.catchPart : null;
+                if (pathAndQuery.startsWith("/")) {
+                    // Leading '/'s break the generated paths
+                    pathAndQuery = pathAndQuery.substring(1);
+                }
+                emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, inSetup, snippet.skipShardsFailures);
+            });
+        }
+
+        private PrintWriter setupCurrent(Snippet test) {
+            if (test.path.equals(lastDocsPath)) {
+                return current;
+            }
+            names.clear();
+            finishLastTest();
+            lastDocsPath = test.path;
+
+            // Make the destination file:
+            // Shift the path into the destination directory tree
+            Path dest = getOutputRoot().toPath().resolve(test.path);
+            // Replace the extension
+            String fileName = dest.getName(dest.getNameCount() - 1).toString();
+            dest = dest.getParent().resolve(fileName.replace(".asciidoc", ".yml"));
+
+            // Now setup the writer
+            try {
+                Files.createDirectories(dest.getParent());
+                current = new PrintWriter(dest.toFile(), "UTF-8");
+                return current;
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
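For orientation, the test step that emitDo writes for a snippet request like `GET /_search?size=1` with a one-line JSON body comes out roughly as follows; the shape follows the println calls above, and the exact indentation is whatever those string literals produce:

      - do:
          raw:
            method: GET
            path: "_search"
            size: "1"
            body: |
              {"query": {"match_all": {}}}
      - is_false: _shards.failures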
+ );
+ }
+ setupCurrent(snippet);
+ current.println("---");
+ current.println("setup:");
+ if (snippet.setup != null) {
+ setup(snippet);
+ }
+ body(snippet, true);
+ }
+
+ private void setup(final Snippet snippet) {
+ // insert a setup defined outside of the docs
+ for (final String name : snippet.setup.split(",")) {
+ final String setup = setups.get(name);
+ if (setup == null) {
+ throw new InvalidUserDataException("Couldn't find named setup " + name + " for " + snippet);
+ }
+ current.println("# Named setup " + name);
+ current.println(setup);
+ }
+ }
+
+ public void checkUnconverted() {
+ List<String> listedButNotFound = new ArrayList<>();
+ for (String listed : expectedUnconvertedCandidates) {
+ if (false == unconvertedCandidates.remove(listed)) {
+ listedButNotFound.add(listed);
+ }
+ }
+ String message = "";
+ if (false == listedButNotFound.isEmpty()) {
+ Collections.sort(listedButNotFound);
+ listedButNotFound = listedButNotFound.stream().map(notfound -> " " + notfound).collect(Collectors.toList());
+ message += "Expected unconverted snippets but none found in:\n";
+ message += listedButNotFound.stream().collect(Collectors.joining("\n"));
+ }
+ if (false == unconvertedCandidates.isEmpty()) {
+ List<String> foundButNotListed = new ArrayList<>(unconvertedCandidates);
+ Collections.sort(foundButNotListed);
+ foundButNotListed = foundButNotListed.stream().map(f -> " " + f).collect(Collectors.toList());
+ if (false == "".equals(message)) {
+ message += "\n";
+ }
+ message += "Unexpected unconverted snippets:\n";
+ message += foundButNotListed.stream().collect(Collectors.joining("\n"));
+ }
+ if (false == "".equals(message)) {
+ throw new InvalidUserDataException(message);
+ }
+ }
+
+ public void finishLastTest() {
+ if (current != null) {
+ current.close();
+ current = null;
+ }
+ }
+ }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java
new file mode 100644
index 0000000000000..b8aa864734f44
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+
+import org.gradle.api.InvalidUserDataException;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+
+public class Snippet {
+ static final int NOT_FINISHED = -1;
+
+ /**
+ * Path to the file containing this snippet. Relative to docs.dir of the
+ * SnippetsTask that created it.
+ */ + Path path; + int start; + int end = NOT_FINISHED; + public String contents; + + Boolean console = null; + boolean test = false; + boolean testResponse = false; + boolean testSetup = false; + boolean testTearDown = false; + String skip = null; + boolean continued = false; + String language = null; + String catchPart = null; + String setup = null; + String teardown = null; + boolean curl; + List warnings = new ArrayList(); + boolean skipShardsFailures = false; + String name; + + public Snippet(Path path, int start, String name) { + this.path = path; + this.start = start; + this.name = name; + } + + public void validate() { + if (language == null) { + throw new InvalidUserDataException( + name + + ": " + + "Snippet missing a language. This is required by " + + "Elasticsearch's doc testing infrastructure so we " + + "be sure we don't accidentally forget to test a " + + "snippet." + ); + } + assertValidCurlInput(); + assertValidJsonInput(); + } + + String getLocation() { + return path + "[" + start + ":" + end + "]"; + } + + private void assertValidCurlInput() { + // Try to detect snippets that contain `curl` + if ("sh".equals(language) || "shell".equals(language)) { + curl = contents.contains("curl"); + if (console == Boolean.FALSE && curl == false) { + throw new InvalidUserDataException(name + ": " + "No need for NOTCONSOLE if snippet doesn't " + "contain `curl`."); + } + } + } + + private void assertValidJsonInput() { + if (testResponse && ("js" == language || "console-result" == language) && null == skip) { + String quoted = contents + // quote values starting with $ + .replaceAll("([:,])\\s*(\\$[^ ,\\n}]+)", "$1 \"$2\"") + // quote fields starting with $ + .replaceAll("(\\$[^ ,\\n}]+)\\s*:", "\"$1\":"); + + JsonFactory jf = new JsonFactory(); + jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true); + JsonParser jsonParser; + + try { + jsonParser = jf.createParser(quoted); + while (jsonParser.isClosed() == false) { + jsonParser.nextToken(); + } + } catch (JsonParseException e) { + throw new InvalidUserDataException( + "Invalid json in " + + name + + ". The error is:\n" + + e.getMessage() + + ".\n" + + "After substitutions and munging, the json looks like:\n" + + quoted, + e + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public String toString() { + String result = path + "[" + start + ":" + end + "]"; + if (language != null) { + result += "(" + language + ")"; + } + if (console != null) { + result += console ? "// CONSOLE" : "// NOTCONSOLE"; + } + if (test) { + result += "// TEST"; + if (catchPart != null) { + result += "[catch: " + catchPart + "]"; + } + if (skip != null) { + result += "[skip=" + skip + "]"; + } + if (continued) { + result += "[continued]"; + } + if (setup != null) { + result += "[setup:" + setup + "]"; + } + if (teardown != null) { + result += "[teardown:" + teardown + "]"; + } + for (String warning : warnings) { + result += "[warning:" + warning + "]"; + } + if (skipShardsFailures) { + result += "[skip_shard_failures]"; + } + } + if (testResponse) { + result += "// TESTRESPONSE"; + if (skip != null) { + result += "[skip=" + skip + "]"; + } + } + if (testSetup) { + result += "// TESTSETUP"; + } + if (curl) { + result += "(curl)"; + } + return result; + } + + /** + * Is this snippet a candidate for conversion to `// CONSOLE`? + */ + boolean isConsoleCandidate() { + /* Snippets that are responses or already marked as `// CONSOLE` or + * `// NOTCONSOLE` are not candidates. 
*/
+ if (console != null || testResponse) {
+ return false;
+ }
+ /* js snippets almost always should be marked with `// CONSOLE`. js
+ * snippets that shouldn't be marked `// CONSOLE`, like examples for
+ * js client, should always be marked with `// NOTCONSOLE`.
+ *
+ * `sh` snippets that contain `curl` almost always should be marked
+ * with `// CONSOLE`. In the exceptionally rare cases where they are
+ * not communicating with Elasticsearch, like the examples in the ec2
+ * and gce discovery plugins, the snippets should be marked
+ * `// NOTCONSOLE`. */
+ return language.equals("js") || curl;
+ }
+
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java
new file mode 100644
index 0000000000000..064c1c460febf
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+
+public interface SnippetParser {
+ List<Snippet> parseDoc(File rootDir, File docFile, List<Map.Entry<String, String>> substitutions);
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java
new file mode 100644
index 0000000000000..b7f2f01aa7987
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+public final class Source {
+ boolean matches;
+ String language;
+ String name;
+
+ public Source(boolean matches, String language, String name) {
+ this.matches = matches;
+ this.language = language;
+ this.name = name;
+ }
+}
diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy
new file mode 100644
index 0000000000000..b7ac363ef7ad3
--- /dev/null
+++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy
@@ -0,0 +1,184 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.doc;
+
+import spock.lang.Specification
+import spock.lang.Unroll
+
+import org.gradle.api.InvalidUserDataException
+
+import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.finalizeSnippet;
+import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.matchSource;
+
+class AsciidocParserSpec extends Specification {
+
+ def testMatchSource() {
+ expect:
+ with(matchSource("[source,console]")) {
+ matches == true
+ language == "console"
+ name == null
+ }
+
+ with(matchSource("[source,console,id=snippet-name-1]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+
+ with(matchSource("[source, console, id=snippet-name-1]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+
+ with(matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+
+ with(matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+
+ with(matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+
+ with(matchSource("[source,console,id=\"snippet-name-1\"]")) {
+ matches == true
+ language == "console"
+ name == "snippet-name-1"
+ }
+ with(matchSource("[source.merge.styled,esql]")) {
+ matches == true
+ language == "esql"
+ }
+
+ with(matchSource("[source.merge.styled,foo-bar]")) {
+ matches == true
+ language == "foo-bar"
+ }
+ }
+
+ @Unroll
+ def "checks for valid json for #languageParam"() {
+ given:
+ def snippet = snippet() {
+ language = languageParam
+ testResponse = true
+ }
+ def json = """{
+ "name": "John Doe",
+ "age": 30,
+ "isMarried": true,
+ "address": {
+ "street": "123 Main Street",
+ "city": "Springfield",
+ "state": "IL",
+ "zip": "62701"
+ },
+ "hobbies": ["Reading", "Cooking", "Traveling"]
+}"""
+ when:
+ def result = finalizeSnippet(snippet, json, [:], [:].entrySet())
+ then:
+ result != null
+
+ when:
+ finalizeSnippet(snippet, "some invalid json", [:], [:].entrySet())
+ then:
+ def e = thrown(InvalidUserDataException)
+ e.message.contains("Invalid json in")
+
+ when:
+ snippet.skip = "true"
+ result = finalizeSnippet(snippet, "some invalid json", [:], [:].entrySet())
+ then:
+ result != null
+
+ where:
+ languageParam << ["js", "console-result"]
+ }
+
+ def "test finalized snippet handles substitutions"() {
+ given:
+ def snippet = snippet() {
+ language = "console"
+ }
+ when:
+ finalizeSnippet(snippet, "snippet-content substDefault subst", [substDefault: "\$body"], [subst: 'substValue'].entrySet())
+ then:
+ snippet.contents == "snippet-content \$body substValue"
+ }
+
+ def snippetMustHaveLanguage() {
+ given:
+ def snippet = snippet()
+ when:
+ finalizeSnippet(snippet, "snippet-content", [:], [])
+ then:
+ def e = thrown(InvalidUserDataException)
+ e.message.contains("Snippet missing a language.")
+ }
+
+ def testEmit() {
+ given:
+ def snippet = snippet() {
+ language = "console"
+ }
+ when:
+ finalizeSnippet(snippet, "snippet-content", [:], [])
+ then:
+ snippet.contents == "snippet-content"
+ }
+
+ def testSnippetsWithCurl() {
+ given:
+ def snippet = snippet() {
+ language = "sh"
+ name = "snippet-name-1"
+ }
+ when:
+ finalizeSnippet(snippet, "curl substDefault subst", [:],
[:].entrySet()) + then: + snippet.curl == true + } + + def "test snippets with no curl no console"() { + given: + def snippet = snippet() { + console = false + language = "shell" + } + when: + finalizeSnippet(snippet, "hello substDefault subst", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("No need for NOTCONSOLE if snippet doesn't contain `curl`") + } + + Snippet snippet(Closure configClosure = {}) { + def snippet = new Snippet(new File("SomePath").toPath(), 0, "snippet-name-1") + configClosure.delegate = snippet + configClosure() + return snippet + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..89939645d0f9c --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -0,0 +1,676 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +class DocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir + + def "handling test parsing multiple snippets per file"() { + given: + def project = ProjectBuilder.builder().build() + def task = project.tasks.register("docSnippetTask", DocSnippetTask).get() + when: + def substitutions = [] + def snippets = task.parseDocFile( + tempDir, docFile( + """ +[[mapper-annotated-text]] +=== Mapper annotated text plugin + +experimental[] + +The mapper-annotated-text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or Named Entity Recognition +tools). + + +The elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token +stream at the same position as the underlying text it annotates. + +:plugin_name: mapper-annotated-text +include::install_remove.asciidoc[] + +[[mapper-annotated-text-usage]] +==== Using the `annotated-text` field + +The `annotated-text` tokenizes text content as per the more common {ref}/text.html[`text`] field (see +"limitations" below) but also injects any marked-up annotation tokens directly into +the search index: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_field": { + "type": "annotated_text" + } + } + } +} +-------------------------- + +Such a mapping would allow marked-up text eg wikipedia articles to be indexed as both text +and structured tokens. The annotations use a markdown-like syntax using URL encoding of +one or more values separated by the `&` symbol. + + +We can use the "_analyze" api to test how an example annotation would be stored as tokens +in the search index: + + +[source,js] +-------------------------- +GET my-index-000001/_analyze +{ + "field": "my_field", + "text":"Investors in [Apple](Apple+Inc.) rejoiced." 
+}
+--------------------------
+// NOTCONSOLE
+
+Response:
+
+[source,js]
+--------------------------------------------------
+{
+ "tokens": [
+ {
+ "token": "investors",
+ "start_offset": 0,
+ "end_offset": 9,
+ "type": "<ALPHANUM>",
+ "position": 0
+ },
+ {
+ "token": "in",
+ "start_offset": 10,
+ "end_offset": 12,
+ "type": "<ALPHANUM>",
+ "position": 1
+ },
+ {
+ "token": "Apple Inc.", <1>
+ "start_offset": 13,
+ "end_offset": 18,
+ "type": "annotation",
+ "position": 2
+ },
+ {
+ "token": "apple",
+ "start_offset": 13,
+ "end_offset": 18,
+ "type": "<ALPHANUM>",
+ "position": 2
+ },
+ {
+ "token": "rejoiced",
+ "start_offset": 19,
+ "end_offset": 27,
+ "type": "<ALPHANUM>",
+ "position": 3
+ }
+ ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+<1> Note the whole annotation token `Apple Inc.` is placed, unchanged as a single token in
+the token stream and at the same position (position 2) as the text token (`apple`) it annotates.
+
+
+We can now perform searches for annotations using regular `term` queries that don't tokenize
+the provided search values. Annotations are a more precise way of matching as can be seen
+in this example where a search for `Beck` will not match `Jeff Beck`:
+
+[source,console]
+--------------------------
+# Example documents
+PUT my-index-000001/_doc/1
+{
+ "my_field": "[Beck](Beck) announced a new tour"<1>
+}
+
+PUT my-index-000001/_doc/2
+{
+ "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2>
+}
+
+# Example search
+GET my-index-000001/_search
+{
+ "query": {
+ "term": {
+ "my_field": "Beck" <3>
+ }
+ }
+}
+--------------------------
+
+<1> As well as tokenising the plain text into single words e.g. `beck`, here we
+inject the single token value `Beck` at the same position as `beck` in the token stream.
+<2> Note annotations can inject multiple tokens at the same position - here we inject both
+the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables
+broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`.
+<3> A benefit of searching with these carefully defined annotation tokens is that a query for
+`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck`
+
+WARNING: Any use of `=` signs in annotation values e.g. `[Prince](person=Prince)` will
+cause the document to be rejected with a parse failure. In future we hope to have a use for
+the equals signs so we will actively reject documents that contain this today.
+
+
+[[mapper-annotated-text-tips]]
+==== Data modelling tips
+===== Use structured and unstructured fields
+
+Annotations are normally a way of weaving structured information into unstructured text for
+higher-precision search.
+
+`Entity resolution` is a form of document enrichment undertaken by specialist software or people
+where references to entities in a document are disambiguated by attaching a canonical ID.
+The ID is used to resolve any number of aliases or distinguish between people with the
+same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved
+entity IDs woven into text.
+ +These IDs can be embedded as annotations in an annotated_text field but it often makes +sense to include them in dedicated structured fields to support discovery via aggregations: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" : { + "type": "keyword" + } + } + } + } + } +} +-------------------------- + +Applications would then typically provide content and discover it as follows: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch", + "my_twitter_handles": ["@kimchy"] <1> +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "elasticsearch OR logstash OR kibana",<2> + "default_field": "my_unstructured_text_field" + } + }, + "aggregations": { + \t"top_people" :{ + \t "significant_terms" : { <3> +\t "field" : "my_twitter_handles.keyword" + \t } + \t} + } +} +-------------------------- + +<1> Note the `my_twitter_handles` contains a list of the annotation values +also used in the unstructured text. (Note the annotated_text syntax requires escaping). +By repeating the annotation values in a structured field this application has ensured that +the tokens discovered in the structured field can be used for search and highlighting +in the unstructured field. +<2> In this example we search for documents that talk about components of the elastic stack +<3> We use the `my_twitter_handles` field here to discover people who are significantly +associated with the elastic stack. + +===== Avoiding over-matching annotations +By design, the regular text tokens and the annotation tokens co-exist in the same indexed +field but in rare cases this can lead to some over-matching. + +The value of an annotation often denotes a _named entity_ (a person, place or company). +The tokens for these named entities are inserted untokenized, and differ from typical text +tokens because they are normally: + +* Mixed case e.g. `Madonna` +* Multiple words e.g. `Jeff Beck` +* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy` + +This means, for the most part, a search for a named entity in the annotated text field will +not have any false positives e.g. when selecting `Apple Inc.` from an aggregation result +you can drill down to highlight uses in the text without "over matching" on any text tokens +like the word `apple` in this context: + + the apple was very juicy + +However, a problem arises if your named entity happens to be a single term and lower-case e.g. the +company `elastic`. In this case, a search on the annotated text field for the token `elastic` +may match a text document such as this: + + they fired an elastic band + +To avoid such false matches users should consider prefixing annotation values to ensure +they don't name clash with text tokens e.g. 
+
+ [elastic](Company_elastic) released version 7.0 of the elastic stack today
+
+
+
+
+[[mapper-annotated-text-highlighter]]
+==== Using the `annotated` highlighter
+
+The `annotated-text` plugin includes a custom highlighter designed to mark up search hits
+in a way which is respectful of the original markup:
+
+[source,console]
+--------------------------
+# Example documents
+PUT my-index-000001/_doc/1
+{
+ "my_field": "The cat sat on the [mat](sku3578)"
+}
+
+GET my-index-000001/_search
+{
+ "query": {
+ "query_string": {
+ "query": "cats"
+ }
+ },
+ "highlight": {
+ "fields": {
+ "my_field": {
+ "type": "annotated", <1>
+ "require_field_match": false
+ }
+ }
+ }
+}
+--------------------------
+
+<1> The `annotated` highlighter type is designed for use with annotated_text fields
+
+The annotated highlighter is based on the `unified` highlighter and supports the same
+settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using
+html-like markup such as `<em>cat</em>` the annotated highlighter uses the same
+markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term`
+is the key and the matched search term is the value e.g.
+
+ The [cat](_hit_term=cat) sat on the [mat](sku3578)
+
+The annotated highlighter tries to be respectful of any existing markup in the original
+text:
+
+* If the search term matches exactly the location of an existing annotation then the
+`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the
+existing annotation.
+* However, if the search term overlaps the span of an existing annotation it would break
+the markup formatting so the original annotation is removed in favour of a new annotation
+with just the search hit information in the results.
+* Any non-overlapping annotations in the original text are preserved in highlighter +selections + + +[[mapper-annotated-text-limitations]] +==== Limitations + +The annotated_text field type supports the same mapping settings as the `text` field type +but with the following exceptions: + +* No support for `fielddata` or `fielddata_frequency_filter` +* No support for `index_prefixes` or `index_phrases` indexing + +""" + ), substitutions + ) + then: + snippets*.test == [false, false, false, false, false, false, false] + snippets*.catchPart == [null, null, null, null, null, null, null] + } + + def "handling test parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.catchPart == ["/painless_explain_error/"] + substitutions.size() == 1 + substitutions[0].key == "_explain\\/1" + substitutions[0].value == "_explain\\/1?error_trace=false" + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ + +[source,console] +---- +PUT _snapshot/my_hdfs_repository +{ + "type": "hdfs", + "settings": { + "uri": "hdfs://namenode:8020/", + "path": "elasticsearch/repositories/my_hdfs_repository", + "conf.dfs.client.read.shortcircuit": "true" + } +} +---- +// TEST[skip:we don't have hdfs set up while testing this] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.skip == ["we don't have hdfs set up while testing this"] + } + + def "handling testresponse parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 1 + substitutions[0].key == "\\.\\.\\." 
+ substitutions[0].value == + "\"script_stack\": \$body.error.caused_by.script_stack, \"script\": \$body.error.caused_by.script, \"lang\": \$body.error.caused_by.lang, \"position\": \$body.error.caused_by.position, \"caused_by\": \$body.error.caused_by.caused_by, \"reason\": \$body.error.caused_by.reason" + + when: + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[skip:no setup made for this example yet] +""" + ), [] + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + snippets*.skip == ["no setup made for this example yet"] + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,txt] +--------------------------------------------------------------------------- +my-index-000001 0 p RELOCATING 3014 31.1mb 192.168.56.10 H5dfFeA -> -> 192.168.56.30 bGG90GE +--------------------------------------------------------------------------- +// TESTRESPONSE[non_json] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 4 + } + + + def "handling console parsing"() { + when: + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- + +// $firstToken +---- +""" + ), [] + ) + then: + snippets*.console == [firstToken.equals("CONSOLE")] + + + when: + task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +// $firstToken +// $secondToken +---- +""" + ), [] + ) + then: + def e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:4: Can't be both CONSOLE and NOTCONSOLE" + + when: + task().parseDocFile( + tempDir, docFile( + """ +// $firstToken +// $secondToken +""" + ), [] + ) + then: + e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:1: $firstToken not paired with a snippet" + + where: + firstToken << ["CONSOLE", "NOTCONSOLE"] + secondToken << ["NOTCONSOLE", "CONSOLE"] + } + + def "test parsing snippet from doc"() { + def doc = docFile( + """ +[source,console] +---- +GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets*.start == [3] + snippets*.language == ["console"] + snippets*.contents == ["""GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is ٢٥٠١٥" +} +"""] + } + + def "test parsing snippet from doc2"() { + given: + def doc = docFile( + """ +[role="xpack"] +[[ml-update-snapshot]] += Update model snapshots API +++++ +Update model snapshots +++++ + +Updates certain properties of a snapshot. + +[[ml-update-snapshot-request]] +== {api-request-title} + +`POST _ml/anomaly_detectors//model_snapshots//_update` + +[[ml-update-snapshot-prereqs]] +== {api-prereq-title} + +Requires the `manage_ml` cluster privilege. This privilege is included in the +`machine_learning_admin` built-in role. 
+ +[[ml-update-snapshot-path-parms]] +== {api-path-parms-title} + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + +[[ml-update-snapshot-request-body]] +== {api-request-body-title} + +The following properties can be updated after the model snapshot is created: + +`description`:: +(Optional, string) A description of the model snapshot. + +`retain`:: +(Optional, Boolean) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] + + +[[ml-update-snapshot-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +-------------------------------------------------- +// TEST[skip:todo] + +When the snapshot is updated, you receive the following results: +[source,js] +---- +{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets*.start == [50, 62] + snippets*.language == ["console", "js"] + snippets*.contents == ["""POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +""", """{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +"""] + } + + + File docFile(String docContent) { + def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() + file.text = docContent + return file + } + + + private DocSnippetTask task() { + ProjectBuilder.builder().build().tasks.register("docSnippetTask", DocSnippetTask).get() + } + +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..6ef4726e1578a --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -0,0 +1,839 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck + +class RestTestsFromDocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir; + + def "test simple block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"") == "\"foo\": \"bort baz\"" + } + + def "test multiple block quotes"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") == "\"foo\": \"bort baz\", \"bar\": \"other\"" + } + + def "test escaping in block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"") == "\"foo\": \"bort\\\" baz\"" + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"") == "\"foo\": \"bort\\n baz\"" + } + + def "test invalid block quotes"() { + given: + String input = "\"foo\": \"\"\"bar\""; + when: + RestTestsFromDocSnippetTask.replaceBlockQuote(input); + then: + def e = thrown(InvalidUserDataException) + e.message == "Invalid block quote starting at 7 in:\n" + input + } + + def "test is doc write request"() { + expect: + shouldAddShardFailureCheck("doc-index/_search") == true + shouldAddShardFailureCheck("_cat") == false + shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview") == false + } + + def "can create rest tests from docs"() { + def build = ProjectBuilder.builder().build() + given: + def task = build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) +// def task = build.tasks.create("restTestFromSnippet", RestTestsFromSnippetsTask) + task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] +// + docs() + task.docs = build.fileTree(new File(tempDir, "docs")) + task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); + + when: + task.getActions().forEach { it.execute(task) } + def restSpec = new File(task.getTestRoot().get().getAsFile(), "rest-api-spec/test/painless-debugging.yml") + + then: + restSpec.exists() + restSpec.text == """--- +"line_22": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "hockey/_doc/1" + refresh: "" + body: | + {"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + - is_false: _shards.failures + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_explain/1" + error_trace: "false" + body: | + { + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } + } + - is_false: _shards.failures + - match: + \$body: + { + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + "script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason + }, + "status": 400 + } + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_update/1" + error_trace: "false" + body: | + { + 
"script": "Debug.explain(ctx._source)" + } + - is_false: _shards.failures + - match: + \$body: + { + "error" : { + "root_cause": \$body.error.root_cause, + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": \$body.error.caused_by.to_string, + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + "script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason + } + }, + "status": 400 + } +""" + def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") + restSpec2.exists() + restSpec2.text == """--- +"line_50": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - always_skip + reason: todo + - do: + raw: + method: POST + path: "_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update" + body: | + { + "description": "Snapshot 1", + "retain": true + } + - is_false: _shards.failures +""" + def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") + restSpec3.exists() + restSpec3.text == """--- +"line_10": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "library/_bulk" + refresh: "" + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + - is_false: _shards.failures + - do: + raw: + method: POST + path: "_sql" + format: "txt" + body: | + { + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" + } + - is_false: _shards.failures + - match: + \$body: + / \\s+author \\s+\\| \\s+name \\s+\\| \\s+page_count \\s+\\| \\s+release_date\\s* + ---------------\\+---------------\\+---------------\\+------------------------\\s* + Dan \\s+Simmons \\s+\\|Hyperion \\s+\\|482 \\s+\\|1989-05-26T00:00:00.000Z\\s* + Frank \\s+Herbert \\s+\\|Dune \\s+\\|604 \\s+\\|1965-06-01T00:00:00.000Z\\s*/ +""" + + def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") + restSpec4.exists() + restSpec4.text == """--- +"line_51": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_director" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_114": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_admin_role" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + 
], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_143": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_analyst_role" + refresh: "true" + body: | + { + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_170": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/admin_user" + refresh: "true" + body: | + { + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} + } + - is_false: _shards.failures +--- +"line_184": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/analyst_user" + refresh: "true" + body: | + { + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} + } + - is_false: _shards.failures +""" +} + + File docFile(String fileName, String docContent) { + def file = tempDir.toPath().resolve(fileName).toFile() + file.parentFile.mkdirs() + file.text = docContent + return file + } + + + void docs() { + docFile( + "docs/reference/sql/getting-started.asciidoc", """ +[role="xpack"] +[[sql-getting-started]] +== Getting Started with SQL + +To start using {es-sql}, create +an index with some data to experiment with: + +[source,console] +-------------------------------------------------- +PUT /library/_bulk?refresh +{"index":{"_id": "Leviathan Wakes"}} +{"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} +{"index":{"_id": "Hyperion"}} +{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} +{"index":{"_id": "Dune"}} +{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} +-------------------------------------------------- + +And now you can execute SQL using the <>: + +[source,console] +-------------------------------------------------- +POST /_sql?format=txt +{ + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" +} +-------------------------------------------------- +// TEST[continued] + +Which should return something along the lines of: + +[source,text] +-------------------------------------------------- + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +// TESTRESPONSE[s/\\|/\\\\|/ s/\\+/\\\\+/] +// TESTRESPONSE[non_json] + +You can also use the <>. 
There is a script to start it +shipped in x-pack's bin directory: + +[source,bash] +-------------------------------------------------- +\$ ./bin/elasticsearch-sql-cli +-------------------------------------------------- + +From there you can run the same query: + +[source,sqlcli] +-------------------------------------------------- +sql> SELECT * FROM library WHERE release_date < '2000-01-01'; + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +""" + ) + docFile( + "docs/ml-update-snapshot.asciidoc", + """ +[role="xpack"] +[[ml-update-snapshot]] += Update model snapshots API +++++ +Update model snapshots +++++ + +Updates certain properties of a snapshot. + +[[ml-update-snapshot-request]] +== {api-request-title} + +`POST _ml/anomaly_detectors//model_snapshots//_update` + +[[ml-update-snapshot-prereqs]] +== {api-prereq-title} + +Requires the `manage_ml` cluster privilege. This privilege is included in the +`machine_learning_admin` built-in role. + +[[ml-update-snapshot-path-parms]] +== {api-path-parms-title} + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + +[[ml-update-snapshot-request-body]] +== {api-request-body-title} + +The following properties can be updated after the model snapshot is created: + +`description`:: +(Optional, string) A description of the model snapshot. + +`retain`:: +(Optional, Boolean) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] + + +[[ml-update-snapshot-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +-------------------------------------------------- +// TEST[skip:todo] + +When the snapshot is updated, you receive the following results: +[source,js] +---- +{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +---- + +""" + ) + + docFile( + "docs/painless-debugging.asciidoc", + """ + +[[painless-debugging]] +=== Painless Debugging + +==== Debug.Explain + +Painless doesn't have a +{wikipedia}/Read%E2%80%93eval%E2%80%93print_loop[REPL] +and while it'd be nice for it to have one day, it wouldn't tell you the +whole story around debugging painless scripts embedded in Elasticsearch because +the data that the scripts have access to or "context" is so important. For now +the best way to debug embedded scripts is by throwing exceptions at choice +places. While you can throw your own exceptions +(`throw new Exception('whatever')`), Painless's sandbox prevents you from +accessing useful information like the type of an object. So Painless has a +utility method, `Debug.explain` which throws the exception for you. For +example, you can use {ref}/search-explain.html[`_explain`] to explore the +context available to a {ref}/query-dsl-script-query.html[script query]. 
+ +[source,console] +--------------------------------------------------------- +PUT /hockey/_doc/1?refresh +{"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + +POST /hockey/_explain/1 +{ + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } +} +--------------------------------------------------------- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +// The test system sends error_trace=true by default for easier debugging so +// we have to override it to get a normal shaped response + +Which shows that the class of `doc.first` is +`org.elasticsearch.index.fielddata.ScriptDocValues.Longs` by responding with: + +[source,console-result] +--------------------------------------------------------- +{ + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + ... + }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason/] + +You can use the same trick to see that `_source` is a `LinkedHashMap` +in the `_update` API: + +[source,console] +--------------------------------------------------------- +POST /hockey/_update/1 +{ + "script": "Debug.explain(ctx._source)" +} +--------------------------------------------------------- +// TEST[continued s/_update\\/1/_update\\/1?error_trace=false/ catch:/painless_explain_error/] + +The response looks like: + +[source,console-result] +--------------------------------------------------------- +{ + "error" : { + "root_cause": ..., + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": "{gp=[26, 82, 1], last=gaudreau, assists=[17, 46, 0], first=johnny, goals=[9, 27, 1]}", + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + ... + } + }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/"root_cause": \\.\\.\\./"root_cause": \$body.error.root_cause/] +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +// TESTRESPONSE[s/"to_string": ".+"/"to_string": \$body.error.caused_by.to_string/] + +Once you have a class you can go to <> to see a list of +available methods. + +""" + ) + docFile( + "docs/reference/security/authorization/run-as-privilege.asciidoc", + """[role="xpack"] +[[run-as-privilege]] += Submitting requests on behalf of other users + +{es} roles support a `run_as` privilege that enables an authenticated user to +submit requests on behalf of other users. For example, if your external +application is trusted to authenticate users, {es} can authenticate the external +application and use the _run as_ mechanism to issue authorized requests as +other users without having to re-authenticate each user. 
+ +To "run as" (impersonate) another user, the first user (the authenticating user) +must be authenticated by a mechanism that supports run-as delegation. The second +user (the `run_as` user) must be authorized by a mechanism that supports +delegated run-as lookups by username. + +The `run_as` privilege essentially operates like a secondary form of +<>. Delegated authorization applies +to the authenticating user, and the `run_as` privilege applies to the user who +is being impersonated. + +Authenticating user:: +-- +For the authenticating user, the following realms (plus API keys) all support +`run_as` delegation: `native`, `file`, Active Directory, JWT, Kerberos, LDAP and +PKI. + +Service tokens, the {es} Token Service, SAML 2.0, and OIDC 1.0 do not +support `run_as` delegation. +-- + +`run_as` user:: +-- +{es} supports `run_as` for any realm that supports user lookup. +Not all realms support user lookup. Refer to the list of <> +and ensure that the realm you wish to use is configured in a manner that +supports user lookup. + +The `run_as` user must be retrieved from a <> - it is not +possible to run as a +<>, +<> or +<>. +-- + +To submit requests on behalf of other users, you need to have the `run_as` +privilege in your <>. For example, the following request +creates a `my_director` role that grants permission to submit request on behalf +of `jacknich` or `redeniro`: + +[source,console] +---- +POST /_security/role/my_director?refresh=true +{ + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } +} +---- + +To submit a request as another user, you specify the user in the +`es-security-runas-user` request header. For example: + +[source,sh] +---- +curl -H "es-security-runas-user: jacknich" -u es-admin -X GET http://localhost:9200/ +---- + +The `run_as` user passed in through the `es-security-runas-user` header must be +available from a realm that supports delegated authorization lookup by username. +Realms that don't support user lookup can't be used by `run_as` delegation from +other realms. + +For example, JWT realms can authenticate external users specified in JWTs, and +execute requests as a `run_as` user in the `native` realm. {es} will retrieve the +indicated `runas` user and execute the request as that user using their roles. + +[[run-as-privilege-apply]] +== Apply the `run_as` privilege to roles +You can apply the `run_as` privilege when creating roles with the +<>. Users who are assigned +a role that contains the `run_as` privilege inherit all privileges from their +role, and can also submit requests on behalf of the indicated users. + +NOTE: Roles for the authenticated user and the `run_as` user are not merged. If +a user authenticates without specifying the `run_as` parameter, only the +authenticated user's roles are used. If a user authenticates and their roles +include the `run_as` parameter, only the `run_as` user's roles are used. + +After a user successfully authenticates to {es}, an authorization process determines whether the user behind an incoming request is allowed to run +that request. If the authenticated user has the `run_as` privilege in their list +of permissions and specifies the run-as header, {es} _discards_ the authenticated +user and associated roles. 
It then looks in each of the configured realms in the +realm chain until it finds the username that's associated with the `run_as` user, +and uses those roles to execute any requests. + +Consider an admin role and an analyst role. The admin role has higher privileges, +but might also want to submit requests as another user to test and verify their +permissions. + +First, we'll create an admin role named `my_admin_role`. This role has `manage` +<> on the entire cluster, and on a subset of +indices. This role also contains the `run_as` privilege, which enables any user +with this role to submit requests on behalf of the specified `analyst_user`. + +[source,console] +---- +POST /_security/role/my_admin_role?refresh=true +{ + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } +} +---- + +Next, we'll create an analyst role named `my_analyst_role`, which has more +restricted `monitor` cluster privileges and `manage` privileges on a subset of +indices. + +[source,console] +---- +POST /_security/role/my_analyst_role?refresh=true +{ + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } +} +---- + +We'll create an administrator user and assign them the role named `my_admin_role`, +which allows this user to submit requests as the `analyst_user`. + +[source,console] +---- +POST /_security/user/admin_user?refresh=true +{ + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} +} +---- + +We can also create an analyst user and assign them the role named +`my_analyst_role`. + +[source,console] +---- +POST /_security/user/analyst_user?refresh=true +{ + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} +} +---- + +You can then authenticate to {es} as the `admin_user` or `analyst_user`. However, the `admin_user` could optionally submit requests on +behalf of the `analyst_user`. The following request authenticates to {es} with a +`Basic` authorization token and submits the request as the `analyst_user`: + +[source,sh] +---- +curl -s -X GET -H "Authorization: Basic YWRtaW5fdXNlcjpsMG5nLXI0bmQwbS1wQHNzdzByZA==" -H "es-security-runas-user: analyst_user" https://localhost:9200/_security/_authenticate +---- + +The response indicates that the `analyst_user` submitted this request, using the +`my_analyst_role` that's assigned to that user. When the `admin_user` submitted +the request, {es} authenticated that user, discarded their roles, and then used +the roles of the `run_as` user. + +[source,sh] +---- +{"username":"analyst_user","roles":["my_analyst_role"],"full_name":"Monday Jaffe","email":null, +"metadata":{"innovation":8},"enabled":true,"authentication_realm":{"name":"native", +"type":"native"},"lookup_realm":{"name":"native","type":"native"},"authentication_type":"realm"} +% +---- + +The `authentication_realm` and `lookup_realm` in the response both specify +the `native` realm because both the `admin_user` and `analyst_user` are from +that realm. 
If the two users are in different realms, the values for +`authentication_realm` and `lookup_realm` are different (such as `pki` and +`native`). +""" + ) + + } +} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java deleted file mode 100644 index 534134e78d40b..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc; - -import org.gradle.api.InvalidUserDataException; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.elasticsearch.gradle.internal.doc.RestTestsFromSnippetsTask.replaceBlockQuote; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class RestTestFromSnippetsTaskTests { - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @Test - public void testInvalidBlockQuote() { - String input = "\"foo\": \"\"\"bar\""; - expectedEx.expect(InvalidUserDataException.class); - expectedEx.expectMessage("Invalid block quote starting at 7 in:\n" + input); - replaceBlockQuote(input); - } - - @Test - public void testSimpleBlockQuote() { - assertEquals("\"foo\": \"bort baz\"", replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); - } - - @Test - public void testMultipleBlockQuotes() { - assertEquals( - "\"foo\": \"bort baz\", \"bar\": \"other\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") - ); - } - - @Test - public void testEscapingInBlockQuote() { - assertEquals("\"foo\": \"bort\\\" baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); - assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); - } - - @Test - public void testIsDocWriteRequest() { - assertTrue((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("doc-index/_search")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_cat")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview")); - } -} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java deleted file mode 100644 index 0acae6ca03297..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.gradle.internal.doc; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class SnippetsTaskTests { - - @Test - public void testMatchSource() { - SnippetsTask.Source source = SnippetsTask.matchSource("[source,console]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertNull(source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source, console, id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source.merge.styled,esql]"); - assertTrue(source.getMatches()); - assertEquals("esql", source.getLanguage()); - - source = SnippetsTask.matchSource("[source.merge.styled,foo-bar]"); - assertTrue(source.getMatches()); - assertEquals("foo-bar", source.getLanguage()); - } -} From d6f9d1e69e0335f2fb27f55d60256af8dde9a4e1 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Apr 2024 12:21:08 +0200 Subject: [PATCH 124/173] ESQL: Rename AUTO_BUCKET to just BUCKET (#107197) This renames the function AUTO_BUCKET to just BUCKET. It also removes the experimental tagging of the function in the docs, making it generally available. 
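For illustration only (this sketch is not part of the change itself): after the
rename, queries call `BUCKET` with the same argument list that `AUTO_BUCKET`
took. Assuming the `employees` test dataset with `hire_date` and `salary` fields
that the docs examples below rely on, an equivalent request through the ES|QL
query API looks like this:

[source,console]
----
POST /_query
{
  "query": """
    // assumes the docs' employees test dataset is loaded
    FROM employees
    | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
    | EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
    | STATS AVG(salary) BY bucket
    | SORT bucket
  """
}
----

On 8.13 and earlier the same query spells the function `AUTO_BUCKET`; the
signature and behavior are unchanged by this rename.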
--- docs/reference/esql/esql-get-started.asciidoc | 10 +- .../{auto_bucket.asciidoc => bucket.asciidoc} | 44 +++--- .../functions/date-time-functions.asciidoc | 4 +- .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../functions/layout/auto_bucket.asciidoc | 14 -- .../esql/functions/layout/bucket.asciidoc | 14 ++ .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../esql/functions/signature/auto_bucket.svg | 1 - .../esql/functions/signature/bucket.svg | 1 + .../{auto_bucket.asciidoc => bucket.asciidoc} | 0 .../src/main/resources/date.csv-spec | 134 +++++++++--------- .../src/main/resources/floats.csv-spec | 4 +- .../src/main/resources/ints.csv-spec | 24 ++-- .../src/main/resources/meta.csv-spec | 8 +- .../src/main/resources/unsigned_long.csv-spec | 4 +- .../function/EsqlFunctionRegistry.java | 4 +- .../math/{AutoBucket.java => Bucket.java} | 12 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 10 +- ...{AutoBucketTests.java => BucketTests.java} | 6 +- .../optimizer/LogicalPlanOptimizerTests.java | 10 +- .../session/IndexResolverFieldNamesTests.java | 12 +- 21 files changed, 157 insertions(+), 159 deletions(-) rename docs/reference/esql/functions/{auto_bucket.asciidoc => bucket.asciidoc} (62%) rename docs/reference/esql/functions/description/{auto_bucket.asciidoc => bucket.asciidoc} (100%) delete mode 100644 docs/reference/esql/functions/layout/auto_bucket.asciidoc create mode 100644 docs/reference/esql/functions/layout/bucket.asciidoc rename docs/reference/esql/functions/parameters/{auto_bucket.asciidoc => bucket.asciidoc} (100%) delete mode 100644 docs/reference/esql/functions/signature/auto_bucket.svg create mode 100644 docs/reference/esql/functions/signature/bucket.svg rename docs/reference/esql/functions/types/{auto_bucket.asciidoc => bucket.asciidoc} (100%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/{AutoBucket.java => Bucket.java} (95%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/{AutoBucketTests.java => BucketTests.java} (96%) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 29f61299cec30..421272f741602 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -240,7 +240,7 @@ include::{esql-specs}/eval.csv-spec[tag=gs-eval-stats-backticks] === Create a histogram To track statistics over time, {esql} enables you to create histograms using the -<> function. `AUTO_BUCKET` creates human-friendly bucket sizes +<> function. `BUCKET` creates human-friendly bucket sizes and returns a value for each row that corresponds to the resulting bucket the row falls into. @@ -248,22 +248,22 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] +include::{esql-specs}/date.csv-spec[tag=gs-bucket] ---- -Combine `AUTO_BUCKET` with <> to create a histogram. For example, +Combine `BUCKET` with <> to create a histogram. 
For example, to count the number of events per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by-median] ---- [discrete] diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/bucket.asciidoc similarity index 62% rename from docs/reference/esql/functions/auto_bucket.asciidoc rename to docs/reference/esql/functions/bucket.asciidoc index 651ac168aa83a..e436a79d0ec1e 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/bucket.asciidoc @@ -1,14 +1,12 @@ [discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -experimental::[] +[[esql-bucket]] +=== `BUCKET` *Syntax* [source,esql] ---- -AUTO_BUCKET(expression, buckets, from, to) +BUCKET(expression, buckets, from, to) ---- *Parameters* @@ -28,39 +26,39 @@ End of the range. Can be a number or a date expressed as a string. *Description* Creates human-friendly buckets and returns a value for each row that corresponds -to the resulting bucket the row falls into. +to the resulting bucket the row falls into. Using a target number of buckets, a start of a range, and an end of a range, -`AUTO_BUCKET` picks an appropriate bucket size to generate the target number of +`BUCKET` picks an appropriate bucket size to generate the target number of buckets or fewer. For example, asking for at most 20 buckets over a year results in monthly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth-result] |=== The goal isn't to provide *exactly* the target number of buckets, it's to pick a range that people are comfortable with that provides at most the target number of buckets. -Combine `AUTO_BUCKET` with +Combine `BUCKET` with <> to create a histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents. +NOTE: `BUCKET` does not create buckets that don't match any documents. That's why this example is missing `1985-03-01` and other dates. Asking for more buckets can result in a smaller range. For example, asking for @@ -68,28 +66,28 @@ at most 100 buckets in a year results in weekly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not filter any rows. 
It only uses the provided range to +NOTE: `BUCKET` does not filter any rows. It only uses the provided range to pick a good bucket size. For rows with a value outside of the range, it returns a bucket value that corresponds to a bucket outside the range. Combine -`AUTO_BUCKET` with <> to filter rows. +`BUCKET` with <> to filter rows. -`AUTO_BUCKET` can also operate on numeric fields. For example, to create a +`BUCKET` can also operate on numeric fields. For example, to create a salary histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric-result] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric-result] |=== Unlike the earlier example that intentionally filters on a date range, you @@ -104,7 +102,7 @@ per hour: [source.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketLast24hr] +include::{esql-specs}/date.csv-spec[tag=docsBucketLast24hr] ---- Create monthly buckets for the year 1985, and calculate the average salary by @@ -112,9 +110,9 @@ hiring month: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg-result] |=== diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index c1cd36e376a1c..149bdffb5ef07 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -8,7 +8,7 @@ {esql} supports these date-time functions: // tag::date_list[] -* experimental:[] <> +* <> * <> * <> * <> @@ -17,7 +17,7 @@ * <> // end::date_list[] -include::auto_bucket.asciidoc[] +include::bucket.asciidoc[] include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/description/auto_bucket.asciidoc rename to docs/reference/esql/functions/description/bucket.asciidoc diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc deleted file mode 100644 index 82e05ab5d215c..0000000000000 --- a/docs/reference/esql/functions/layout/auto_bucket.asciidoc +++ /dev/null @@ -1,14 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. - -[discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -*Syntax* - -[.text-center] -image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline] - -include::../parameters/auto_bucket.asciidoc[] -include::../description/auto_bucket.asciidoc[] -include::../types/auto_bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/bucket.asciidoc b/docs/reference/esql/functions/layout/bucket.asciidoc new file mode 100644 index 0000000000000..0445007237c8c --- /dev/null +++ b/docs/reference/esql/functions/layout/bucket.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +[discrete] +[[esql-bucket]] +=== `BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/bucket.svg[Embedded,opts=inline] + +include::../parameters/bucket.asciidoc[] +include::../description/bucket.asciidoc[] +include::../types/bucket.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/parameters/auto_bucket.asciidoc rename to docs/reference/esql/functions/parameters/bucket.asciidoc diff --git a/docs/reference/esql/functions/signature/auto_bucket.svg b/docs/reference/esql/functions/signature/auto_bucket.svg deleted file mode 100644 index 7da9a053825f1..0000000000000 --- a/docs/reference/esql/functions/signature/auto_bucket.svg +++ /dev/null @@ -1 +0,0 @@ -AUTO_BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/bucket.svg b/docs/reference/esql/functions/signature/bucket.svg new file mode 100644 index 0000000000000..f6662910c010d --- /dev/null +++ b/docs/reference/esql/functions/signature/bucket.svg @@ -0,0 +1 @@ +BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/types/auto_bucket.asciidoc rename to docs/reference/esql/functions/types/bucket.asciidoc diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8f9ce9968d89d..5b3b6235ccb8b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -261,36 +261,36 @@ int:integer |dt:date // end::to_datetime-int-result[] ; -autoBucketSimpleMonth -// tag::auto_bucket_month[] +bucketSimpleMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_month[] ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=AUTO_BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::auto_bucket_month[] +| EVAL bucket=BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_month[] ; -// tag::auto_bucket_month-result[] +// tag::bucket_month-result[] date:datetime | bucket:datetime 1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z -// end::auto_bucket_month-result[] +// end::bucket_month-result[] ; -autoBucketSimpleWeek -// tag::auto_bucket_week[] +bucketSimpleWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_week[] ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=AUTO_BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::auto_bucket_week[] +| EVAL bucket=BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_week[] ; -// tag::auto_bucket_week-result[] +// tag::bucket_week-result[] date:datetime | bucket:datetime 1985-07-09T00:00:00.000Z | 1985-07-08T00:00:00.000Z -// end::auto_bucket_week-result[] +// end::bucket_week-result[] ; -autoBucketMonth +bucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", 
"1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -308,10 +308,10 @@ hire_date:date | hd:date 1985-11-21T00:00:00.000Z | 1985-11-01T00:00:00.000Z ; -autoBucketWeek +bucketWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = auto_bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| eval hd = bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd; @@ -350,10 +350,10 @@ from employees | where birth_date > now() | sort emp_no asc | keep emp_no, birth emp_no:integer | birth_date:date ; -autoBucketYearInAgg#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) +| EVAL bucket = BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) | STATS COUNT(*) by bucket | sort bucket; @@ -361,12 +361,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsString#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsString#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -374,12 +374,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsConcat#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsConcat#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | STATS COUNT(*) by bucket | sort bucket; @@ -387,12 +387,12 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsDate#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsDate#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") | EVAL bucket_end = NOW() -| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) | keep bucket_start, bucket_end, bucket | STATS COUNT(*) by bucket | sort bucket; @@ -401,31 +401,31 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketYearInAggConstRefsRename#[skip:-8.12.99, reason:date type is supported in 8.13] +bucketYearInAggConstRefsRename#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1999-01-01T00:00:00Z" | EVAL bucket_start = "1999-01-01T00:00:00Z" | EVAL bucket_end = NOW() | RENAME bucket_end as be, bucket_start as bs -| STATS c = COUNT(*) by AUTO_BUCKET(hire_date, 5, bs, be) +| STATS c = COUNT(*) by BUCKET(hire_date, 5, bs, be) | SORT c ; -c:long | AUTO_BUCKET(hire_date, 5, bs, be):date +c:long | BUCKET(hire_date, 5, bs, be):date 1 | 1999-01-01T00:00:00.000Z ; -autoBucketMonthInAgg -// tag::auto_bucket_in_agg[] 
+bucketMonthInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_in_agg[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket -// end::auto_bucket_in_agg[] +// end::bucket_in_agg[] ; -// tag::auto_bucket_in_agg-result[] +// tag::bucket_in_agg-result[] AVG(salary):double | bucket:date 46305.0 | 1985-02-01T00:00:00.000Z 44817.0 | 1985-05-01T00:00:00.000Z @@ -433,7 +433,7 @@ AVG(salary):double | bucket:date 49095.0 | 1985-09-01T00:00:00.000Z 51532.0 | 1985-10-01T00:00:00.000Z 54539.75 | 1985-11-01T00:00:00.000Z -// end::auto_bucket_in_agg-result[] +// end::bucket_in_agg-result[] ; evalDateDiffInNanoAndMicroAndMilliSeconds#[skip:-8.12.99, reason:date_diff added in 8.13] @@ -950,17 +950,17 @@ birth_date:datetime 1953-04-21T00:00:00.000Z ; -docsAutoBucketMonth -//tag::docsAutoBucketMonth[] +docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonth[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | KEEP hire_date, month | SORT hire_date -//end::docsAutoBucketMonth[] +//end::docsBucketMonth[] ; -//tag::docsAutoBucketMonth-result[] +//tag::docsBucketMonth-result[] hire_date:date | month:date 1985-02-18T00:00:00.000Z|1985-02-01T00:00:00.000Z 1985-02-24T00:00:00.000Z|1985-02-01T00:00:00.000Z @@ -973,20 +973,20 @@ FROM employees 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z 1985-11-21T00:00:00.000Z|1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonth-result[] +//end::docsBucketMonth-result[] ; -docsAutoBucketMonthlyHistogram -//tag::docsAutoBucketMonthlyHistogram[] +docsBucketMonthlyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonthlyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_month = COUNT(*) BY month | SORT month -//end::docsAutoBucketMonthlyHistogram[] +//end::docsBucketMonthlyHistogram[] ; -//tag::docsAutoBucketMonthlyHistogram-result[] +//tag::docsBucketMonthlyHistogram-result[] hires_per_month:long | month:date 2 |1985-02-01T00:00:00.000Z 1 |1985-05-01T00:00:00.000Z @@ -994,20 +994,20 @@ FROM employees 1 |1985-09-01T00:00:00.000Z 2 |1985-10-01T00:00:00.000Z 4 |1985-11-01T00:00:00.000Z -//end::docsAutoBucketMonthlyHistogram-result[] +//end::docsBucketMonthlyHistogram-result[] ; -docsAutoBucketWeeklyHistogram -//tag::docsAutoBucketWeeklyHistogram[] +docsBucketWeeklyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketWeeklyHistogram[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL week = AUTO_BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| EVAL week = BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS hires_per_week = COUNT(*) BY week | SORT week -//end::docsAutoBucketWeeklyHistogram[] 
+//end::docsBucketWeeklyHistogram[] ; -//tag::docsAutoBucketWeeklyHistogram-result[] +//tag::docsBucketWeeklyHistogram-result[] hires_per_week:long | week:date 2 |1985-02-18T00:00:00.000Z 1 |1985-05-13T00:00:00.000Z @@ -1015,40 +1015,40 @@ FROM employees 1 |1985-09-16T00:00:00.000Z 2 |1985-10-14T00:00:00.000Z 4 |1985-11-18T00:00:00.000Z -//end::docsAutoBucketWeeklyHistogram-result[] +//end::docsBucketWeeklyHistogram-result[] ; -docsAutoBucketLast24hr#[skip:-8.12.99, reason:date type is supported in 8.13] -//tag::docsAutoBucketLast24hr[] +docsBucketLast24hr#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketLast24hr[] FROM sample_data | WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() -| EVAL bucket = AUTO_BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) +| EVAL bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) | STATS COUNT(*) BY bucket -//end::docsAutoBucketLast24hr[] +//end::docsBucketLast24hr[] ; COUNT(*):long | bucket:date ; -docsGettingStartedAutoBucket#[skip:-8.12.99, reason:date type is supported in 8.13] -// tag::gs-auto_bucket[] +docsGettingStartedBucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket[] FROM sample_data | KEEP @timestamp -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) -// end::gs-auto_bucket[] +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) +// end::gs-bucket[] | LIMIT 0 ; @timestamp:date | bucket:date ; -docsGettingStartedAutoBucketStatsBy -// tag::gs-auto_bucket-stats-by[] +docsGettingStartedBucketStatsBy#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS COUNT(*) BY bucket -// end::gs-auto_bucket-stats-by[] +// end::gs-bucket-stats-by[] | SORT bucket ; @@ -1057,13 +1057,13 @@ COUNT(*):long | bucket:date 5 |2023-10-23T13:00:00.000Z ; -docsGettingStartedAutoBucketStatsByMedian -// tag::gs-auto_bucket-stats-by-median[] +docsGettingStartedBucketStatsByMedian#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by-median[] FROM sample_data | KEEP @timestamp, event_duration -| EVAL bucket = AUTO_BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") | STATS median_duration = MEDIAN(event_duration) BY bucket -// end::gs-auto_bucket-stats-by-median[] +// end::gs-bucket-stats-by-median[] | SORT bucket ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 0882fec5ec0bf..8f8f218fd9821 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -257,10 +257,10 @@ emp_no:integer | salary_change:double | a1:double 10005 | [-2.14,13.07] | [-2.14,13.07] ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bh = auto_bucket(height, 20, 1.41, 2.10) +| EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date, height | KEEP hire_date, height, bh ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 3e1d1b19a7f67..026e3d922d00d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -642,17 +642,17 @@ emp_no:integer | salary_change.long:long | a1:long 10005 | [-2, 13] | [-2, 13] ; -autoBucket -// tag::auto_bucket[] +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket[] FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | SORT hire_date, salary | KEEP hire_date, salary, bs -// end::auto_bucket[] +// end::bucket[] ; -// tag::auto_bucket-result[] +// tag::bucket-result[] hire_date:date | salary:integer | bs:double 1985-02-18T00:00:00.000Z | 66174 | 65000.0 1985-02-24T00:00:00.000Z | 26436 | 25000.0 @@ -665,19 +665,19 @@ hire_date:date | salary:integer | bs:double 1985-11-20T00:00:00.000Z | 33956 | 30000.0 1985-11-20T00:00:00.000Z | 74999 | 70000.0 1985-11-21T00:00:00.000Z | 56371 | 55000.0 -// end::auto_bucket-result[] +// end::bucket-result[] ; -docsAutoBucketNumeric -//tag::docsAutoBucketNumeric[] +docsBucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketNumeric[] FROM employees -| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| EVAL bs = BUCKET(salary, 20, 25324, 74999) | STATS COUNT(*) by bs | SORT bs -//end::docsAutoBucketNumeric[] +//end::docsBucketNumeric[] ; -//tag::docsAutoBucketNumeric-result[] +//tag::docsBucketNumeric-result[] COUNT(*):long | bs:double 9 |25000.0 9 |30000.0 @@ -689,7 +689,7 @@ FROM employees 9 |60000.0 8 |65000.0 8 |70000.0 -//end::docsAutoBucketNumeric-result[] +//end::docsBucketNumeric-result[] ; cos diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 492da4ee5ef36..d0e18426f03ab 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -7,8 +7,8 @@ synopsis:keyword "double asin(number:double|integer|long|unsigned_long)" "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "double avg(number:double|integer|long)" +"double|date bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|keyword|text, to:integer|long|double|date|keyword|text)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" @@ -117,8 +117,8 @@ acos |number |"double|integer|long|unsigne asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. 
atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -auto_bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] avg |number |"double|integer|long" |[""] +bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |["", "", "", ""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[, CIDR block to test the IP against.] @@ -228,8 +228,8 @@ acos |Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. -auto_bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. avg |The average of a numeric field. +bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. 
@@ -340,8 +340,8 @@ acos |double asin |double |false |false |false atan |double |false |false |false atan2 |double |[false, false] |false |false -auto_bucket |"double|date" |[false, false, false, false]|false |false avg |double |false |false |true +bucket |"double|date" |[false, false, false, false]|false |false case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index f1a15f41af7b3..2bf9259478032 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -150,10 +150,10 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK ; -autoBucket +bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM ul_logs | WHERE @timestamp >= "2017-11-10T20:30:00Z" AND @timestamp < "2017-11-10T20:35:00Z" -| EVAL bh = auto_bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) +| EVAL bh = bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) | SORT @timestamp | KEEP @timestamp, bytes_in, bh ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index a1a7c95ece2f9..62688d753aeef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -48,7 +48,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -143,7 +143,7 @@ private FunctionDefinition[][] functions() { def(Asin.class, Asin::new, "asin"), def(Atan.class, Atan::new, "atan"), def(Atan2.class, Atan2::new, "atan2"), - def(AutoBucket.class, AutoBucket::new, "auto_bucket"), + def(Bucket.class, Bucket::new, "bucket"), def(Ceil.class, Ceil::new, "ceil"), def(Cos.class, Cos::new, "cos"), def(Cosh.class, Cosh::new, "cosh"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java similarity index 95% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java index ea581437f6c4f..b58a9bae08146 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java @@ -48,13 +48,13 @@ *

    * Takes a date field and three constants and picks a bucket size based on the * constants. The constants are "target bucket count", "from", and "to". It looks like: - * {@code auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. + * {@code bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. * We have a list of "human" bucket sizes like "one month" and "four hours". We pick * the largest range that covers the range in fewer than the target bucket count. So * in the above case we'll pick month long buckets, yielding 12 buckets. *

    */ -public class AutoBucket extends EsqlScalarFunction implements Validatable { +public class Bucket extends EsqlScalarFunction implements Validatable { // TODO maybe we should just cover the whole of representable dates here - like ten years, 100 years, 1000 years, all the way up. // That way you never end up with more than the target number of buckets. private static final Rounding LARGEST_HUMAN_DATE_ROUNDING = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).build(); @@ -86,7 +86,7 @@ public class AutoBucket extends EsqlScalarFunction implements Validatable { @FunctionInfo(returnType = { "double", "date" }, description = """ Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into.""") - public AutoBucket( + public Bucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, @Param(name = "buckets", type = { "integer" }) Expression buckets, @@ -226,12 +226,12 @@ public DataType dataType() { @Override public Expression replaceChildren(List newChildren) { - return new AutoBucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + return new Bucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } @Override protected NodeInfo info() { - return NodeInfo.create(this, AutoBucket::new, field, buckets, from, to); + return NodeInfo.create(this, Bucket::new, field, buckets, from, to); } public Expression field() { @@ -252,6 +252,6 @@ public Expression to() { @Override public String toString() { - return "AutoBucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; + return "Bucket{" + "field=" + field + ", buckets=" + buckets + ", from=" + from + ", to=" + to + '}'; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 27e3c95bd123a..a0fecd731c71c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -69,7 +69,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.AutoBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -378,7 +378,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Trim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), // ScalarFunction of(ScalarFunction.class, Atan2.class, PlanNamedTypes::writeAtan2, PlanNamedTypes::readAtan2), - of(ScalarFunction.class, AutoBucket.class, PlanNamedTypes::writeAutoBucket, PlanNamedTypes::readAutoBucket), + of(ScalarFunction.class, Bucket.class, PlanNamedTypes::writeBucket, PlanNamedTypes::readBucket), of(ScalarFunction.class, Case.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, CIDRMatch.class, 
PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), @@ -1395,11 +1395,11 @@ static void writeAtan2(PlanStreamOutput out, Atan2 atan2) throws IOException { out.writeExpression(atan2.x()); } - static AutoBucket readAutoBucket(PlanStreamInput in) throws IOException { - return new AutoBucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); + static Bucket readBucket(PlanStreamInput in) throws IOException { + return new Bucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); } - static void writeAutoBucket(PlanStreamOutput out, AutoBucket bucket) throws IOException { + static void writeBucket(PlanStreamOutput out, Bucket bucket) throws IOException { out.writeSource(bucket.source()); out.writeExpression(bucket.field()); out.writeExpression(bucket.buckets()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java similarity index 96% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index 9d8cf702a375a..23122863b95f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -28,8 +28,8 @@ import static org.hamcrest.Matchers.equalTo; -public class AutoBucketTests extends AbstractFunctionTestCase { - public AutoBucketTests(@Name("TestCase") Supplier testCaseSupplier) { +public class BucketTests extends AbstractFunctionTestCase { + public BucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -141,7 +141,7 @@ private static Matcher dateResultsMatcher(List args) { - return new AutoBucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); + return new Bucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index a60999baba9fe..7de3308fcab16 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3299,26 +3299,26 @@ public void testNestedExpressionsInStatsWithExpression() { assertThat(Expressions.names(fields), contains("languages + emp_no")); } - public void testLogicalPlanOptimizerVerifier() { + public void testBucketAcceptsEvalLiteralReferences() { var plan = plan(""" from test | eval bucket_start = 1, bucket_end = 100000 - | eval auto_bucket(salary, 10, bucket_start, bucket_end) + | eval bucket(salary, 10, bucket_start, bucket_end) """); var ab = as(plan, Eval.class); assertTrue(ab.optimized()); } - public void testLogicalPlanOptimizerVerificationException() { + public void testBucketFailsOnFieldArgument() { VerificationException e = 
expectThrows(VerificationException.class, () -> plan(""" from test | eval bucket_end = 100000 - | eval auto_bucket(salary, 10, emp_no, bucket_end) + | eval bucket(salary, 10, emp_no, bucket_end) """)); assertTrue(e.getMessage().startsWith("Found ")); final String header = "Found 1 problem\nline "; assertEquals( - "3:32: third argument of [auto_bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", + "3:27: third argument of [bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", e.getMessage().substring(header.length()) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 17ad5eb8b9f3d..ff6c60310fd87 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -212,11 +212,11 @@ public void testIn2() { | limit 4""", Set.of("hire_date", "hire_date.*", "birth_date", "birth_date.*")); } - public void testAutoBucketMonth() { + public void testBucketMonth() { assertFieldNames(""" from employees | where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" - | eval hd = auto_bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | sort hire_date | keep hire_date, hd""", Set.of("hire_date", "hire_date.*")); } @@ -228,11 +228,11 @@ public void testBorn_before_today() { ); } - public void testAutoBucketMonthInAgg() { + public void testBucketMonthInAgg() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bucket = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") + | EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") | STATS AVG(salary) BY bucket | SORT bucket""", Set.of("salary", "salary.*", "hire_date", "hire_date.*")); } @@ -554,11 +554,11 @@ public void testConvertFromDatetime() { ); } - public void testAutoBucket() { + public void testBucket() { assertFieldNames(""" FROM employees | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" - | EVAL bh = auto_bucket(height, 20, 1.41, 2.10) + | EVAL bh = bucket(height, 20, 1.41, 2.10) | SORT hire_date | KEEP hire_date, height, bh""", Set.of("hire_date", "hire_date.*", "height", "height.*")); } From ceeee1bf563db4115a197bee76784572d8ba040d Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Apr 2024 11:39:38 +0100 Subject: [PATCH 125/173] Recommend NFS over S3-like repositories (#107297) Allegedly-S3-compatible APIs are very popular these days, but many third-party systems offering such an API also support a shared filesystem interface. Shared filesystem protocols such as NFS are much better specified than the S3 API, and experience shows that they lead to fewer compatibility headaches. This commit adds a recommendation to the `repository-s3` docs to consider such an interface instead. 
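As a sketch of the recommended alternative (illustrative only; the repository
name `my_fs_backup` and the mount point `/mnt/backups` are hypothetical), a
storage system that also exposes an NFS share can be registered as a shared
file system repository rather than accessed through its S3-compatible API:

[source,console]
----
// hypothetical mount point; the location must be listed under path.repo in
// elasticsearch.yml on every master and data node before registration
PUT _snapshot/my_fs_backup
{
  "type": "fs",
  "settings": {
    "location": "/mnt/backups"
  }
}
----

The filesystem protocol's much smaller, better-specified surface is what avoids
the compatibility issues described above.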
--- .../snapshot-restore/repository-s3.asciidoc | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 0c79793ee6c5a..11324639cb2f0 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -541,13 +541,17 @@ MinIO-backed repositories as well as repositories stored on AWS S3. Other S3-compatible storage systems may also work with {es}, but these are not covered by the {es} test suite. -Note that some storage systems claim to be S3-compatible but do not faithfully -emulate S3's behaviour in full. The `repository-s3` type requires full -compatibility with S3. In particular it must support the same set of API -endpoints, return the same errors in case of failures, and offer consistency and -performance at least as good as S3 even when accessed concurrently by multiple -nodes. You will need to work with the supplier of your storage system to address -any incompatibilities you encounter. Please do not report {es} issues involving +There are many systems, including some from very well-known storage vendors, +which claim to offer an S3-compatible API despite failing to emulate S3's +behaviour in full. If you are using such a system for your snapshots, consider +using a <> based +on a standardized protocol such as NFS to access your storage system instead. +The `repository-s3` type requires full compatibility with S3. In particular it +must support the same set of API endpoints, with the same parameters, return +the same errors in case of failures, and offer consistency and performance at +least as good as S3 even when accessed concurrently by multiple nodes. You will +need to work with the supplier of your storage system to address any +incompatibilities you encounter. Please do not report {es} issues involving storage systems which claim to be S3-compatible unless you can demonstrate that the same issue exists when using a genuine AWS S3 repository. From 8bcbc971288bc14c9aeee9d0ea2424ebb55dc572 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Apr 2024 12:50:12 +0200 Subject: [PATCH 126/173] Rename generated docs for (renamed) BUCKET func (#107299) This checks in the generated-by-test doc files for newly renamed BUCKET function. --- .../kibana/definition/{auto_bucket.json => bucket.json} | 2 +- .../esql/functions/kibana/docs/{auto_bucket.md => bucket.md} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename docs/reference/esql/functions/kibana/definition/{auto_bucket.json => bucket.json} (99%) rename docs/reference/esql/functions/kibana/docs/{auto_bucket.md => bucket.md} (94%) diff --git a/docs/reference/esql/functions/kibana/definition/auto_bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json similarity index 99% rename from docs/reference/esql/functions/kibana/definition/auto_bucket.json rename to docs/reference/esql/functions/kibana/definition/bucket.json index 96940e5f051f2..dda3f384424b4 100644 --- a/docs/reference/esql/functions/kibana/definition/auto_bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -1,7 +1,7 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", - "name" : "auto_bucket", + "name" : "bucket", "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", "signatures" : [ { diff --git a/docs/reference/esql/functions/kibana/docs/auto_bucket.md b/docs/reference/esql/functions/kibana/docs/bucket.md similarity index 94% rename from docs/reference/esql/functions/kibana/docs/auto_bucket.md rename to docs/reference/esql/functions/kibana/docs/bucket.md index df3999f968486..6ebfe7de5527d 100644 --- a/docs/reference/esql/functions/kibana/docs/auto_bucket.md +++ b/docs/reference/esql/functions/kibana/docs/bucket.md @@ -2,7 +2,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. --> -### AUTO_BUCKET +### BUCKET Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. From 84d61579c164df39a7cfb70be19775bd95a93c94 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 10 Apr 2024 13:04:40 +0200 Subject: [PATCH 127/173] Do not report document metering on system indices (#107041) For system indices we don't want to emit metrics. DocumentSizeReporter will be created given an index. It will internally contain a SystemIndices instance that will verify the indexName with isSystemName --- docs/changelog/107041.yaml | 6 ++++++ .../internal/DocumentSizeObserverWithPipelinesIT.java | 2 +- .../plugins/internal/DocumentSizeObserverIT.java | 2 +- .../elasticsearch/action/bulk/TransportShardBulkAction.java | 2 +- .../plugins/internal/DocumentParsingProvider.java | 4 ++-- .../java/org/elasticsearch/ingest/IngestServiceTests.java | 2 +- 6 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/107041.yaml diff --git a/docs/changelog/107041.yaml b/docs/changelog/107041.yaml new file mode 100644 index 0000000000000..b8b4f3d7c5690 --- /dev/null +++ b/docs/changelog/107041.yaml @@ -0,0 +1,6 @@ +pr: 107041 +summary: '`DocumentParsingObserver` to accept an `indexName` to allow skipping system + indices' +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java index d1cdc719b02f1..49ab73e8d2375 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverWithPipelinesIT.java @@ -92,7 +92,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java index fd6151e8eadde..edf6973849bad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/plugins/internal/DocumentSizeObserverIT.java @@ -86,7 
+86,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return new TestDocumentSizeReporter(); } }; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 265719b4738c0..39de11d39bc34 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -487,7 +487,7 @@ private static void onComplete( final BulkItemResponse executionResult = context.getExecutionResult(); final boolean isFailed = executionResult.isFailed(); if (isFailed == false && opType != DocWriteRequest.OpType.DELETE) { - DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(); + DocumentSizeReporter documentSizeReporter = documentParsingProvider.getDocumentParsingReporter(docWriteRequest.index()); DocumentSizeObserver documentSizeObserver = context.getDocumentSizeObserver(); documentSizeReporter.onCompleted(docWriteRequest.index(), documentSizeObserver.normalisedBytesParsed()); } diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java index 946cd97968e22..329f3d704e50b 100644 --- a/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java +++ b/server/src/main/java/org/elasticsearch/plugins/internal/DocumentParsingProvider.java @@ -19,7 +19,7 @@ public DocumentSizeObserver newDocumentSizeObserver() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return DocumentSizeReporter.EMPTY_INSTANCE; } @@ -42,6 +42,6 @@ public DocumentSizeObserver newFixedSizeDocumentObserver(long normalisedBytesPar /** * @return an instance of a reporter to use when parsing has been completed and indexing successful */ - DocumentSizeReporter getDocumentParsingReporter(); + DocumentSizeReporter getDocumentParsingReporter(String indexName); } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 084eb94852524..41e865ceb97fb 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -1206,7 +1206,7 @@ public long normalisedBytesParsed() { } @Override - public DocumentSizeReporter getDocumentParsingReporter() { + public DocumentSizeReporter getDocumentParsingReporter(String indexName) { return null; } From b3bcc81ca6fafd505ac7b9a168017f59c0a3477b Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 10 Apr 2024 08:28:40 -0400 Subject: [PATCH 128/173] [Transform] Release test resources (#107057) Consume the HttpEntity after the API response is parsed, releasing network and thread resources back to their respective pools. Leaving them unconsumed does not appear to be causing issues during tests, but it does log a large number of hanging threads on test failure, making it harder to spot the real problem when a thread hangs during a transform test.
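In spirit, every call site now follows this pattern (a minimal sketch assuming Apache HttpCore's EntityUtils and the low-level REST client's Response; the helper name releaseAfterAssert is illustrative only and not part of this change — the real helpers below are assertOKAndConsume, entityAsMap and assertAcknowledged):

    private static void releaseAfterAssert(Response response) {
        try {
            assertOK(response); // may throw an AssertionError for a non-2xx status
        } finally {
            // Drain the entity even when the assertion fails, so the
            // connection and its thread go back to their pools.
            EntityUtils.consumeQuietly(response.getEntity());
        }
    }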
Close #107055 --- .../test/rest/ESRestTestCase.java | 29 ++++++++-- .../common/TransformCommonRestTestCase.java | 2 +- .../integration/TransformChainIT.java | 2 +- .../integration/TransformRestTestCase.java | 54 +++++++++---------- 4 files changed, 53 insertions(+), 34 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 29b74478bec6b..4c1980fb1f673 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -157,11 +157,16 @@ public abstract class ESRestTestCase extends ESTestCase { /** * Convert the entity from a {@link Response} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static Map entityAsMap(Response response) throws IOException { return entityAsMap(response.getEntity()); } + /** + * Convert the entity from a {@link Response} into a map of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. + */ public static Map entityAsMap(HttpEntity entity) throws IOException { XContentType xContentType = XContentType.fromMediaType(entity.getContentType().getValue()); // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation @@ -174,11 +179,14 @@ public static Map entityAsMap(HttpEntity entity) throws IOExcept ) ) { return parser.map(); + } finally { + EntityUtils.consumeQuietly(entity); } } /** * Convert the entity from a {@link Response} into a list of maps. + * Consumes the underlying HttpEntity, releasing any resources it may be holding. */ public static List entityAsList(Response response) throws IOException { XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); @@ -192,6 +200,8 @@ public static List entityAsList(Response response) throws IOException { ) ) { return parser.list(); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); } } @@ -1603,6 +1613,14 @@ public static Response assertOK(Response response) { return response; } + public static void assertOKAndConsume(Response response) { + try { + assertOK(response); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } + } + public static ObjectPath assertOKAndCreateObjectPath(Response response) throws IOException { assertOK(response); return ObjectPath.createFromResponse(response); @@ -1622,9 +1640,14 @@ public static void assertDocCount(RestClient client, String indexName, long docC } public static void assertAcknowledged(Response response) throws IOException { - assertOK(response); - String jsonBody = EntityUtils.toString(response.getEntity()); - assertThat(jsonBody, containsString("\"acknowledged\":true")); + try { + assertOK(response); + String jsonBody = EntityUtils.toString(response.getEntity()); + assertThat(jsonBody, containsString("\"acknowledged\":true")); + } finally { + // if assertOK throws an exception, still release resources + EntityUtils.consumeQuietly(response.getEntity()); + } } /** diff --git a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java index 486dd7c581032..98cf817d6c018 100644 --- 
a/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java +++ b/x-pack/plugin/transform/qa/common/src/main/java/org/elasticsearch/xpack/transform/integration/common/TransformCommonRestTestCase.java @@ -122,6 +122,6 @@ protected void logAudits() throws Exception { protected void refreshIndex(String index) throws IOException { Request refreshRequest = new Request("POST", index + "/_refresh"); - assertOK(adminClient().performRequest(refreshRequest)); + assertOKAndConsume(adminClient().performRequest(refreshRequest)); } } diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java index 600ceb3cd8202..4d9a9e7705052 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformChainIT.java @@ -188,7 +188,7 @@ private void testChainedTransforms(final int numTransforms) throws Exception { assertFalse(aliasExists(destWriteAlias)); String transformConfig = createTransformConfig(sourceIndex, destIndex, destReadAlias, destWriteAlias); - assertAcknowledged(putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT)); + putTransform(transformId, transformConfig, true, RequestOptions.DEFAULT); } List transformIdsShuffled = new ArrayList<>(transformIds); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 4cc9a31c8eff5..4b7e478dbb61d 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -10,6 +10,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -168,15 +169,15 @@ protected void deleteTransform(String id, boolean force) throws IOException { if (force) { request.addParameter(TransformField.FORCE.getPreferredName(), "true"); } - assertOK(adminClient().performRequest(request)); + assertAcknowledged(adminClient().performRequest(request)); createdTransformIds.remove(id); } - protected Response putTransform(String id, String config, RequestOptions options) throws IOException { - return putTransform(id, config, false, options); + protected void putTransform(String id, String config, RequestOptions options) throws IOException { + putTransform(id, config, false, options); } - protected Response putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { + protected void putTransform(String id, String config, boolean deferValidation, RequestOptions options) throws IOException { if 
(createdTransformIds.contains(id)) { throw new IllegalArgumentException("transform [" + id + "] is already registered"); } @@ -187,9 +188,8 @@ protected Response putTransform(String id, String config, boolean deferValidatio request.addParameter("defer_validation", "true"); } request.setOptions(options); - Response response = assertOK(client().performRequest(request)); + assertAcknowledged(client().performRequest(request)); createdTransformIds.add(id); - return response; } protected Map previewTransform(String transformConfig, RequestOptions options) throws IOException { @@ -214,8 +214,7 @@ protected Map getBasicTransformStats(String id) throws IOExcepti var request = new Request("GET", TRANSFORM_ENDPOINT + id + "/_stats"); request.addParameter(BASIC_STATS.getPreferredName(), "true"); request.setOptions(RequestOptions.DEFAULT); - Response response = client().performRequest(request); - List> stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(response)); + var stats = (List>) XContentMapValues.extractValue("transforms", entityAsMap(client().performRequest(request))); assertThat(stats, hasSize(1)); return stats.get(0); } @@ -226,11 +225,10 @@ protected String getTransformState(String id) throws IOException { @SuppressWarnings("unchecked") protected Map getTransform(String id) throws IOException { - Request request = new Request("GET", TRANSFORM_ENDPOINT + id); - Response response = client().performRequest(request); - List> transformConfigs = (List>) XContentMapValues.extractValue( + var request = new Request("GET", TRANSFORM_ENDPOINT + id); + var transformConfigs = (List>) XContentMapValues.extractValue( "transforms", - entityAsMap(response) + entityAsMap(client().performRequest(request)) ); assertThat(transformConfigs, hasSize(1)); return transformConfigs.get(0); @@ -257,14 +255,6 @@ protected long getCheckpoint(Map stats) { return ((Integer) XContentMapValues.extractValue("checkpointing.last.checkpoint", stats)).longValue(); } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithFixedInterval( - String field, - DateHistogramInterval interval, - ZoneId zone - ) { - return new DateHistogramGroupSource(field, null, false, new DateHistogramGroupSource.FixedInterval(interval), zone, null); - } - protected DateHistogramGroupSource createDateHistogramGroupSourceWithCalendarInterval( String field, DateHistogramInterval interval, @@ -357,7 +347,7 @@ protected TransformConfig.Builder createTransformConfigBuilder( String destinationIndex, QueryConfig queryConfig, String... 
sourceIndices - ) throws Exception { + ) { return TransformConfig.builder() .setId(id) .setSource(new SourceConfig(sourceIndices, queryConfig, Collections.emptyMap())) @@ -377,7 +367,7 @@ protected void updateConfig(String id, String update, boolean deferValidation, R } updateRequest.setJsonEntity(update); updateRequest.setOptions(options); - assertOK(client().performRequest(updateRequest)); + assertOKAndConsume(client().performRequest(updateRequest)); } protected void createReviewsIndex( @@ -447,7 +437,7 @@ protected void createReviewsIndex( Request req = new Request("PUT", indexName); req.setEntity(indexMappings); req.setOptions(RequestOptions.DEFAULT); - assertOK(adminClient().performRequest(req)); + assertAcknowledged(adminClient().performRequest(req)); } // create index @@ -489,9 +479,12 @@ protected void doBulk(String bulkDocuments, boolean refresh) throws IOException bulkRequest.setJsonEntity(bulkDocuments); bulkRequest.setOptions(RequestOptions.DEFAULT); Response bulkResponse = adminClient().performRequest(bulkRequest); - assertOK(bulkResponse); - var bulkMap = entityAsMap(bulkResponse); - assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + try { + var bulkMap = entityAsMap(assertOK(bulkResponse)); + assertThat((boolean) bulkMap.get("errors"), is(equalTo(false))); + } finally { + EntityUtils.consumeQuietly(bulkResponse.getEntity()); + } } protected Map matchAllSearch(String index, int size, RequestOptions options) throws IOException { @@ -499,8 +492,11 @@ protected Map matchAllSearch(String index, int size, RequestOpti request.addParameter("size", Integer.toString(size)); request.setOptions(options); Response response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + try { + return entityAsMap(assertOK(response)); + } finally { + EntityUtils.consumeQuietly(response.getEntity()); + } } private void waitForPendingTasks() { @@ -515,7 +511,7 @@ private void waitForPendingTasks() { ); request.addParameters(parameters); try { - adminClient().performRequest(request); + EntityUtils.consumeQuietly(adminClient().performRequest(request).getEntity()); } catch (Exception e) { throw new AssertionError("Failed to wait for pending tasks to complete", e); } From 8638deeb9e5c14f1fb1f0fddbcee76e3ec0c342d Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 10 Apr 2024 08:37:49 -0400 Subject: [PATCH 129/173] OpenAI model_id is required (#107286) --- docs/reference/inference/put-inference.asciidoc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 6df1993175a0d..7d0ede82f70fa 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -165,7 +165,7 @@ want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `model_id`::: -(Optional, string) +(Required, string) The name of the model to use for the {infer} task. Refer to the https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] for the list of available text embedding models.
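With `model_id` now required, a minimal OpenAI {infer} endpoint definition looks like the following sketch (the endpoint name, placeholder key, and model choice are illustrative, not part of this diff):

    PUT _inference/text_embedding/openai_embeddings
    {
      "service": "openai",
      "service_settings": {
        "api_key": "<api_key>",
        "model_id": "text-embedding-ada-002"
      }
    }

A request that omits `model_id` from `service_settings` should now be rejected when the endpoint is created, rather than failing later at {infer} time.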
@@ -431,4 +431,3 @@ PUT _inference/completion/openai_completion } ------------------------------------------------------------ // TEST[skip:TBD] - From 943885d0cd180be0436a9191181ebb768d5d4e05 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 10 Apr 2024 15:12:20 +0200 Subject: [PATCH 130/173] [DOCS][ESQL] Render locate function docs (#107305) --- docs/reference/esql/functions/string-functions.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index b568ae1061bb5..273c508fc6f63 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -11,6 +11,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -25,6 +26,7 @@ include::concat.asciidoc[] include::layout/left.asciidoc[] include::length.asciidoc[] +include::layout/locate.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] include::right.asciidoc[] From 19e9fc32f2d449e77e905aba6ca03c6351b3bbdc Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 10 Apr 2024 16:13:21 +0200 Subject: [PATCH 131/173] ES|QL: regex warnings in csv-spec tests (#107273) --- .../xpack/esql/ccq/MultiClustersIT.java | 6 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 17 ++++-- .../esql/qa/rest/RestEnrichTestCase.java | 8 +-- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 57 ++++++++++++------- .../xpack/esql/EsqlTestUtils.java | 14 +++++ .../testFixtures/src/main/resources/README.md | 43 +++++++++++++- .../src/main/resources/ip.csv-spec | 21 +++++++ .../elasticsearch/xpack/esql/CsvTests.java | 4 +- .../elasticsearch/xpack/ql/CsvSpecReader.java | 14 +++++ 9 files changed, 148 insertions(+), 36 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 9a494f6309997..2f681fc23bf31 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -133,14 +133,12 @@ protected boolean supportsAsync() { private Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (supportsAsync()) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - public void testCount() throws Exception { { Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 349954450904d..4d8770a6ff112 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -35,6 +35,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import 
java.util.regex.Pattern; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; @@ -143,7 +144,11 @@ protected void shouldSkipTest(String testName) throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - Map answer = runEsql(builder.query(testCase.query), testCase.expectedWarnings(false)); + Map answer = runEsql( + builder.query(testCase.query), + testCase.expectedWarnings(false), + testCase.expectedWarningsRegex() + ); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); @@ -160,12 +165,16 @@ protected final void doTest() throws Throwable { assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger); } - private Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + private Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { if (mode == Mode.ASYNC) { assert supportsAsync(); - return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings); + return RestEsqlTestCase.runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index e04435b715c99..a670b11c61780 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -144,7 +144,7 @@ public void wipeTestData() throws IOException { public void testNonExistentEnrichPolicy() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris"), List.of()) + () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris")) ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), @@ -188,14 +188,12 @@ public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { private Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { if (mode == Mode.ASYNC) { - return RestEsqlTestCase.runEsqlAsync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlAsync(requestObject); } else { - return RestEsqlTestCase.runEsqlSync(requestObject, NO_WARNINGS); + return RestEsqlTestCase.runEsqlSync(requestObject); } } - private static final List NO_WARNINGS = List.of(); - @Override protected boolean preserveClusterUponCompletion() { return true; diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 5aa48234cb11a..a2296168c5fc0 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ 
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -50,10 +50,10 @@ import java.util.Map; import java.util.Set; import java.util.function.IntFunction; +import java.util.regex.Pattern; import static java.util.Collections.emptySet; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -76,6 +76,7 @@ public abstract class RestEsqlTestCase extends ESRestTestCase { private static final Logger LOGGER = LogManager.getLogger(RestEsqlTestCase.class); private static final List NO_WARNINGS = List.of(); + private static final List NO_WARNINGS_REGEX = List.of(); private static final String MAPPING_ALL_TYPES; @@ -393,7 +394,7 @@ public void testCSVNoHeaderMode() throws IOException { options.addHeader("Content-Type", mediaType); options.addHeader("Accept", "text/csv; header=absent"); request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); String actual = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); assertEquals("keyword0,0\r\n", actual); } @@ -452,7 +453,7 @@ public void testOutOfRangeComparisons() throws IOException { "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. Only first 20 failures recorded.", "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" ); - var result = runEsql(query, expectedWarnings, mode); + var result = runEsql(query, expectedWarnings, NO_WARNINGS_REGEX, mode); var values = as(result.get("values"), ArrayList.class); assertThat( @@ -661,22 +662,35 @@ private static String expectedTextBody(String format, int count, @Nullable Chara } public Map runEsql(RequestObjectBuilder requestObject) throws IOException { - return runEsql(requestObject, NO_WARNINGS, mode); + return runEsql(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX, mode); } public static Map runEsqlSync(RequestObjectBuilder requestObject) throws IOException { - return runEsqlSync(requestObject, NO_WARNINGS); + return runEsqlSync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); } - static Map runEsql(RequestObjectBuilder requestObject, List expectedWarnings, Mode mode) throws IOException { + public static Map runEsqlAsync(RequestObjectBuilder requestObject) throws IOException { + return runEsqlAsync(requestObject, NO_WARNINGS, NO_WARNINGS_REGEX); + } + + static Map runEsql( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex, + Mode mode + ) throws IOException { if (mode == ASYNC) { - return runEsqlAsync(requestObject, expectedWarnings); + return runEsqlAsync(requestObject, expectedWarnings, expectedWarningsRegex); } else { - return runEsqlSync(requestObject, expectedWarnings); + return runEsqlSync(requestObject, expectedWarnings, expectedWarningsRegex); } } - public static Map runEsqlSync(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlSync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { requestObject.build(); Request request = prepareRequest(SYNC); String mediaType = attachBody(requestObject, request); @@ -692,11 +706,15 
@@ public static Map runEsqlSync(RequestObjectBuilder requestObject } request.setOptions(options); - HttpEntity entity = performRequest(request, expectedWarnings); + HttpEntity entity = performRequest(request, expectedWarnings, expectedWarningsRegex); return entityToMap(entity, requestObject.contentType()); } - public static Map runEsqlAsync(RequestObjectBuilder requestObject, List expectedWarnings) throws IOException { + public static Map runEsqlAsync( + RequestObjectBuilder requestObject, + List expectedWarnings, + List expectedWarningsRegex + ) throws IOException { addAsyncParameters(requestObject); requestObject.build(); Request request = prepareRequest(ASYNC); @@ -730,7 +748,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec // no id returned from an async call, must have completed immediately and without keep_on_completion assertThat(requestObject.keepOnCompletion(), either(nullValue()).or(is(false))); assertThat((boolean) json.get("is_running"), is(false)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); json.remove("is_running"); // remove this to not mess up later map assertions return Collections.unmodifiableMap(json); } else { @@ -739,7 +757,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec if ((boolean) json.get("is_running") == false) { // must have completed immediately so keep_on_completion must be true assertThat(requestObject.keepOnCompletion(), is(true)); - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); // we already have the results, but let's remember them so that we can compare to async get initialColumns = json.get("columns"); initialValues = json.get("values"); @@ -763,7 +781,7 @@ public static Map runEsqlAsync(RequestObjectBuilder requestObjec assertEquals(initialValues, result.get("values")); } - assertWarnings(response, expectedWarnings); + assertWarnings(response, expectedWarnings, expectedWarningsRegex); assertDeletable(id); return removeAsyncProperties(result); } @@ -837,7 +855,7 @@ static String runEsqlAsTextWithFormat(RequestObjectBuilder builder, String forma } request.setOptions(options); - HttpEntity entity = performRequest(request, List.of()); + HttpEntity entity = performRequest(request, NO_WARNINGS, NO_WARNINGS_REGEX); return Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); } @@ -870,8 +888,9 @@ private static String attachBody(RequestObjectBuilder requestObject, Request req return mediaType; } - private static HttpEntity performRequest(Request request, List allowedWarnings) throws IOException { - return assertWarnings(performRequest(request), allowedWarnings); + private static HttpEntity performRequest(Request request, List allowedWarnings, List allowedWarningsRegex) + throws IOException { + return assertWarnings(performRequest(request), allowedWarnings, allowedWarningsRegex); } private static Response performRequest(Request request) throws IOException { @@ -884,13 +903,13 @@ private static Response performRequest(Request request) throws IOException { return response; } - private static HttpEntity assertWarnings(Response response, List allowedWarnings) { + private static HttpEntity assertWarnings(Response response, List allowedWarnings, List allowedWarningsRegex) { List warnings = new ArrayList<>(response.getWarnings()); warnings.removeAll(mutedWarnings()); if (shouldLog()) { LOGGER.info("RESPONSE warnings (after muted)={}", warnings); } - 
assertMap(warnings, matchesList(allowedWarnings)); + EsqlTestUtils.assertWarnings(warnings, allowedWarnings, allowedWarningsRegex); return response.getEntity(); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index fc8f80a19f09f..e6470e0eb2d05 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -43,11 +43,15 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import static java.util.Collections.emptyList; import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.ql.TestUtils.of; import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertTrue; public final class EsqlTestUtils { @@ -244,4 +248,14 @@ public static String randomEnrichCommand(String name, Enrich.Mode mode, String m all.addAll(after); return String.join(" | ", all); } + + public static void assertWarnings(List warnings, List allowedWarnings, List allowedWarningsRegex) { + if (allowedWarningsRegex.isEmpty()) { + assertMap(warnings.stream().sorted().toList(), matchesList(allowedWarnings.stream().sorted().toList())); + } else { + for (String warning : warnings) { + assertTrue("Unexpected warning: " + warning, allowedWarningsRegex.stream().anyMatch(x -> x.matcher(warning).matches())); + } + } + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md index fdd52c6aac229..dad5ae2828174 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md @@ -102,7 +102,7 @@ include::{esql-specs}/floats.csv-spec[tag=sin-result]
    What is this asciidoc syntax? -The first section is a source code block for the ES|QL query: +The first section is a source code block for the ES|QL query: - a [source](https://docs.asciidoctor.org/asciidoc/latest/verbatim/source-blocks/) code block (delimited by `----`) - `source.merge.styled,esql` indicates custom syntax highlighting for ES|QL @@ -176,3 +176,44 @@ row a = [true, false, false, true] ``` That skips nodes that don't have the `esql.mv_sort` feature. + + +### Warnings + +Some queries can return warnings, e.g. for number overflows or when a multi-value is passed to a function +that does not support it. + +Each CSV-SPEC test also has to assert all the expected warnings. + +Warnings can be specified as plain text or as a regular expression (but a single test cannot have a mix of both). +Each warning has to be specified on a single row, between the query and the result, prefixed by `warning:` or `warningRegex:`. +If multiple warnings are defined, the order is not relevant. + +This is an example of how to test a query that returns two warnings: + +```csv-spec +addLongOverflow +row max = 9223372036854775807 | eval sum = max + 1 | keep sum; + +warning:Line 1:44: evaluation of [max + 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:44: java.lang.ArithmeticException: long overflow + +sum:long +null +; +``` + +The same, using regular expressions (note that the `+` has to be escaped in the pattern): + +```csv-spec +addLongOverflow +row max = 9223372036854775807 | eval sum = max + 1 | keep sum; + +warningRegex:Line \d+:\d+: evaluation of \[max \+ 1\] failed, treating result as null. Only first 20 failures recorded. +warningRegex:Line \d+:\d+: java.lang.ArithmeticException: long overflow + +sum:long +null +; +``` + diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 58c1cf3dc9174..8e0da1dd354ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -166,6 +166,27 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 ; + +inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] +required_feature: esql.mv_warn + +from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; +ignoreOrder:true +warningRegex:Line \d+:\d+: evaluation of \[ip0==ip1\] failed, treating result as null. Only first 20 failures recorded. +warningRegex:Line \d+:\d+: evaluation of \[eq in \(ip0, ip1\)\] failed, treating result as null. Only first 20 failures recorded.
+warningRegex:java.lang.IllegalArgumentException: single-value function encountered multi-value + +card:keyword |host:keyword |ip0:ip |ip1:ip |eq:ip +eth0 |alpha |127.0.0.1 |127.0.0.1 |127.0.0.1 +eth1 |alpha |::1 |::1 |::1 +eth0 |beta |127.0.0.1 |::1 |::1 +eth1 |beta |127.0.0.1 |127.0.0.2 |127.0.0.2 +eth1 |beta |127.0.0.1 |128.0.0.1 |128.0.0.1 +lo0 |gamma |fe80::cae2:65ff:fece:feb9 |fe81::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:feb9 +eth0 |gamma |fe80::cae2:65ff:fece:feb9 |127.0.0.3 |127.0.0.3 +eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 +; + cidrMatchSimple required_feature: esql.mv_warn diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 662ae1a208ed0..573dbd20b39c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -99,8 +99,6 @@ import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.ListMatcher.matchesList; -import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues; @@ -470,6 +468,6 @@ private void assertWarnings(List warnings) { normalized.add(normW); } } - assertMap(normalized, matchesList(testCase.expectedWarnings(true))); + EsqlTestUtils.assertWarnings(normalized, testCase.expectedWarnings(true), testCase.expectedWarningsRegex()); } } diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 5023b5a4bf877..757fe411387d6 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -11,6 +11,7 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; +import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @@ -68,7 +69,15 @@ public Object parse(String line) { // read data String lower = line.toLowerCase(Locale.ROOT); if (lower.startsWith("warning:")) { + if (testCase.expectedWarningsRegex.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } testCase.expectedWarnings.add(line.substring("warning:".length()).trim()); + } else if (lower.startsWith("warningregex:")) { + if (testCase.expectedWarnings.isEmpty() == false) { + throw new IllegalArgumentException("Cannot mix warnings and regex warnings in CSV SPEC files: [" + line + "]"); + } + testCase.expectedWarningsRegex.add(Pattern.compile(".*" + line.substring("warningregex:".length()).trim() + ".*")); } else if (lower.startsWith("ignoreorder:")) { testCase.ignoreOrder = Boolean.parseBoolean(line.substring("ignoreOrder:".length()).trim()); } else if (line.startsWith(";")) { @@ -93,6 +102,7 @@ public static class CsvTestCase { public String earlySchema; public String expectedResults; private final List expectedWarnings = new ArrayList<>(); + private final List 
expectedWarningsRegex = new ArrayList<>(); public boolean ignoreOrder; public List requiredFeatures = List.of(); @@ -137,6 +147,10 @@ public List expectedWarnings(boolean forEmulated) { public void adjustExpectedWarnings(Function updater) { expectedWarnings.replaceAll(updater::apply); } + + public List expectedWarningsRegex() { + return expectedWarningsRegex; + } } } From 48a88c575cca0579b3eeca46bcdeb1242b12b3db Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Wed, 10 Apr 2024 09:21:24 -0500 Subject: [PATCH 132/173] Renaming GeoIpDownloaderStatsAction (#107290) Renaming GeoIpDownloaderStatsAction to GeoIpStatsAction --- .../ingest/apis/geoip-stats-api.asciidoc | 8 ++--- .../ingest/geoip/GeoIpDownloaderIT.java | 25 +++++---------- .../ingest/geoip/GeoIpDownloaderStatsIT.java | 8 ++--- .../ingest/geoip/IngestGeoIpPlugin.java | 10 +++--- ...StatsAction.java => GeoIpStatsAction.java} | 32 +++++++++---------- ...on.java => GeoIpStatsTransportAction.java} | 18 +++++------ ...sAction.java => RestGeoIpStatsAction.java} | 8 ++--- ...tsActionNodeResponseSerializingTests.java} | 15 ++++----- ...=> GeoIpStatsActionNodeResponseTests.java} | 4 +-- ...pStatsActionResponseSerializingTests.java} | 17 +++++----- 10 files changed, 65 insertions(+), 80 deletions(-) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsAction.java => GeoIpStatsAction.java} (88%) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsTransportAction.java => GeoIpStatsTransportAction.java} (80%) rename modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/{RestGeoIpDownloaderStatsAction.java => RestGeoIpStatsAction.java} (80%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionNodeResponseSerializingTests.java => GeoIpStatsActionNodeResponseSerializingTests.java} (68%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionNodeResponseTests.java => GeoIpStatsActionNodeResponseTests.java} (91%) rename modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/{GeoIpDownloaderStatsActionResponseSerializingTests.java => GeoIpStatsActionResponseSerializingTests.java} (50%) diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 6ef0db546342b..84a2b00737e5a 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -4,8 +4,8 @@ GeoIP stats ++++ -Gets download statistics for GeoIP2 databases used with the -<>. +Gets statistics about the <>, including +download statistics for GeoIP2 databases used with it. [source,console] ---- @@ -60,7 +60,7 @@ Total number of database updates skipped. `nodes`:: (object) -Downloaded GeoIP2 databases for each node. +Statistics for each node. + .Properties of `nodes` [%collapsible%open] @@ -90,4 +90,4 @@ Downloaded database files, including related license files. {es} stores these files in the node's <>: `$ES_TMPDIR/geoip-databases/`. 
===== -==== \ No newline at end of file +==== diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 54d465aecda52..9dcd8abc7bc57 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.IngestPlugin; @@ -121,13 +121,10 @@ public void cleanUp() throws Exception { } }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); - assertThat(response.getStats().getDatabasesCount(), equalTo(0)); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); + assertThat(response.getDownloaderStats().getDatabasesCount(), equalTo(0)); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); assertThat(nodeResponse.getDatabases(), empty()); assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).toList(), empty()); @@ -703,12 +700,9 @@ private void setupDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat( nodeResponse.getConfigDatabases(), containsInAnyOrder("GeoLite2-Country.mmdb", "GeoLite2-City.mmdb", "GeoLite2-ASN.mmdb") @@ -751,12 +745,9 @@ private void deleteDatabasesInConfigDirectory() throws Exception { }); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response response = client().execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request() - ).actionGet(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request()).actionGet(); assertThat(response.getNodes(), not(empty())); - for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { + for (GeoIpStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); } }); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java index 77b0faeeb6ebd..ec54317e144d1 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; @@ -65,8 +65,8 @@ public void testStats() throws Exception { * slowly to pass. */ assumeTrue("only test with fixture to have stable results", getEndpoint() != null); - GeoIpDownloaderStatsAction.Request req = new GeoIpDownloaderStatsAction.Request(); - GeoIpDownloaderStatsAction.Response response = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Request req = new GeoIpStatsAction.Request(); + GeoIpStatsAction.Response response = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView jsonMapView = new XContentTestUtils.JsonMapView(convertToMap(response)); assertThat(jsonMapView.get("stats.successful_downloads"), equalTo(0)); assertThat(jsonMapView.get("stats.failed_downloads"), equalTo(0)); @@ -78,7 +78,7 @@ public void testStats() throws Exception { updateClusterSettings(Settings.builder().put(GeoIpDownloaderTaskExecutor.ENABLED_SETTING.getKey(), true)); assertBusy(() -> { - GeoIpDownloaderStatsAction.Response res = client().execute(GeoIpDownloaderStatsAction.INSTANCE, req).actionGet(); + GeoIpStatsAction.Response res = client().execute(GeoIpStatsAction.INSTANCE, req).actionGet(); XContentTestUtils.JsonMapView view = new XContentTestUtils.JsonMapView(convertToMap(res)); assertThat(view.get("stats.successful_downloads"), equalTo(4)); assertThat(view.get("stats.failed_downloads"), equalTo(0)); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 2e0a84cfde23b..e5756652a9842 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -28,9 +28,9 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsTransportAction; -import org.elasticsearch.ingest.geoip.stats.RestGeoIpDownloaderStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsTransportAction; +import org.elasticsearch.ingest.geoip.stats.RestGeoIpStatsAction; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksExecutor; @@ -144,7 +144,7 @@ public List> getPersistentTasksExecutor( @Override public List> getActions() { - return 
List.of(new ActionHandler<>(GeoIpDownloaderStatsAction.INSTANCE, GeoIpDownloaderStatsTransportAction.class)); + return List.of(new ActionHandler<>(GeoIpStatsAction.INSTANCE, GeoIpStatsTransportAction.class)); } @Override @@ -159,7 +159,7 @@ public List getRestHandlers( Supplier nodesInCluster, Predicate clusterSupportsFeature ) { - return List.of(new RestGeoIpDownloaderStatsAction()); + return List.of(new RestGeoIpStatsAction()); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java similarity index 88% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java index f9b1d8c637f68..db1242888ca82 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java @@ -30,11 +30,11 @@ import java.util.Objects; import java.util.Set; -public class GeoIpDownloaderStatsAction { +public class GeoIpStatsAction { public static final ActionType INSTANCE = new ActionType<>("cluster:monitor/ingest/geoip/stats"); - private GeoIpDownloaderStatsAction() {/* no instances */} + private GeoIpStatsAction() {/* no instances */} public static class Request extends BaseNodesRequest implements ToXContentObject { @@ -89,8 +89,8 @@ public Response(ClusterName clusterName, List nodes, List n.stats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); + public GeoIpDownloaderStats getDownloaderStats() { + return getNodes().stream().map(n -> n.downloaderStats).filter(Objects::nonNull).findFirst().orElse(GeoIpDownloaderStats.EMPTY); } @Override @@ -105,7 +105,7 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws I @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - GeoIpDownloaderStats stats = getStats(); + GeoIpDownloaderStats stats = getDownloaderStats(); builder.startObject(); builder.field("stats", stats); builder.startObject("nodes"); @@ -153,14 +153,14 @@ public int hashCode() { public static class NodeResponse extends BaseNodeResponse { - private final GeoIpDownloaderStats stats; + private final GeoIpDownloaderStats downloaderStats; private final Set databases; private final Set filesInTemp; private final Set configDatabases; protected NodeResponse(StreamInput in) throws IOException { super(in); - stats = in.readBoolean() ? new GeoIpDownloaderStats(in) : null; + downloaderStats = in.readBoolean() ? 
new GeoIpDownloaderStats(in) : null; databases = in.readCollectionAsImmutableSet(StreamInput::readString); filesInTemp = in.readCollectionAsImmutableSet(StreamInput::readString); configDatabases = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) @@ -170,20 +170,20 @@ protected NodeResponse(StreamInput in) throws IOException { protected NodeResponse( DiscoveryNode node, - GeoIpDownloaderStats stats, + GeoIpDownloaderStats downloaderStats, Set databases, Set filesInTemp, Set configDatabases ) { super(node); - this.stats = stats; + this.downloaderStats = downloaderStats; this.databases = Set.copyOf(databases); this.filesInTemp = Set.copyOf(filesInTemp); this.configDatabases = Set.copyOf(configDatabases); } - public GeoIpDownloaderStats getStats() { - return stats; + public GeoIpDownloaderStats getDownloaderStats() { + return downloaderStats; } public Set getDatabases() { @@ -201,9 +201,9 @@ public Set getConfigDatabases() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(stats != null); - if (stats != null) { - stats.writeTo(out); + out.writeBoolean(downloaderStats != null); + if (downloaderStats != null) { + downloaderStats.writeTo(out); } out.writeStringCollection(databases); out.writeStringCollection(filesInTemp); @@ -217,7 +217,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NodeResponse that = (NodeResponse) o; - return stats.equals(that.stats) + return downloaderStats.equals(that.downloaderStats) && databases.equals(that.databases) && filesInTemp.equals(that.filesInTemp) && Objects.equals(configDatabases, that.configDatabases); @@ -225,7 +225,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(stats, databases, filesInTemp, configDatabases); + return Objects.hash(downloaderStats, databases, filesInTemp, configDatabases); } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java index 0958002405fbe..13f9544e1b9e4 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsTransportAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java @@ -18,10 +18,10 @@ import org.elasticsearch.ingest.geoip.DatabaseNodeService; import org.elasticsearch.ingest.geoip.GeoIpDownloader; import org.elasticsearch.ingest.geoip.GeoIpDownloaderTaskExecutor; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeRequest; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.NodeResponse; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Request; -import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStatsAction.Response; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeRequest; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.NodeResponse; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Request; +import org.elasticsearch.ingest.geoip.stats.GeoIpStatsAction.Response; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -29,14 +29,14 @@ import java.io.IOException; import java.util.List; -public class GeoIpDownloaderStatsTransportAction extends TransportNodesAction { +public class GeoIpStatsTransportAction extends TransportNodesAction { private final TransportService transportService; private final DatabaseNodeService registry; private final GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor; @Inject - public GeoIpDownloaderStatsTransportAction( + public GeoIpStatsTransportAction( TransportService transportService, ClusterService clusterService, ThreadPool threadPool, @@ -45,7 +45,7 @@ public GeoIpDownloaderStatsTransportAction( GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor ) { super( - GeoIpDownloaderStatsAction.INSTANCE.name(), + GeoIpStatsAction.INSTANCE.name(), clusterService, transportService, actionFilters, @@ -75,10 +75,10 @@ protected NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throw @Override protected NodeResponse nodeOperation(NodeRequest request, Task task) { GeoIpDownloader geoIpTask = geoIpDownloaderTaskExecutor.getCurrentTask(); - GeoIpDownloaderStats stats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); + GeoIpDownloaderStats downloaderStats = geoIpTask == null || geoIpTask.getStatus() == null ? null : geoIpTask.getStatus(); return new NodeResponse( transportService.getLocalNode(), - stats, + downloaderStats, registry.getAvailableDatabases(), registry.getFilesInTemp(), registry.getConfigDatabases() diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java similarity index 80% rename from modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java rename to modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java index 49f3ee81c7f62..ac6022205d04e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpDownloaderStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java @@ -20,7 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; @ServerlessScope(Scope.INTERNAL) -public class RestGeoIpDownloaderStatsAction extends BaseRestHandler { +public class RestGeoIpStatsAction extends BaseRestHandler { @Override public String getName() { @@ -34,10 +34,6 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - return channel -> client.execute( - GeoIpDownloaderStatsAction.INSTANCE, - new GeoIpDownloaderStatsAction.Request(), - new RestToXContentListener<>(channel) - ); + return channel -> client.execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request(), new RestToXContentListener<>(channel)); } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java similarity index 68% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java rename to 
modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java index 54193967ba853..1008dcf56c4f1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java @@ -15,30 +15,29 @@ import java.util.Set; -public class GeoIpDownloaderStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.NodeResponse> { +public class GeoIpStatsActionNodeResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.NodeResponse::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.NodeResponse::new; } @Override - protected GeoIpDownloaderStatsAction.NodeResponse createTestInstance() { + protected GeoIpStatsAction.NodeResponse createTestInstance() { return createRandomInstance(); } @Override - protected GeoIpDownloaderStatsAction.NodeResponse mutateInstance(GeoIpDownloaderStatsAction.NodeResponse instance) { + protected GeoIpStatsAction.NodeResponse mutateInstance(GeoIpStatsAction.NodeResponse instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } - static GeoIpDownloaderStatsAction.NodeResponse createRandomInstance() { + static GeoIpStatsAction.NodeResponse createRandomInstance() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = Set.copyOf(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - return new GeoIpDownloaderStatsAction.NodeResponse( + return new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java similarity index 91% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java index a0fd470ef0468..27a332c3b42f9 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionNodeResponseTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java @@ -18,14 +18,14 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -public class GeoIpDownloaderStatsActionNodeResponseTests extends ESTestCase { +public class GeoIpStatsActionNodeResponseTests extends ESTestCase { public void testInputsAreDefensivelyCopied() { DiscoveryNode node = DiscoveryNodeUtils.create("id"); Set databases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set files = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); Set configDatabases = new HashSet<>(randomList(10, () -> randomAlphaOfLengthBetween(5, 10))); - GeoIpDownloaderStatsAction.NodeResponse nodeResponse = new 
GeoIpDownloaderStatsAction.NodeResponse( + GeoIpStatsAction.NodeResponse nodeResponse = new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), databases, diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java similarity index 50% rename from modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java rename to modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java index d566fa8838df1..6e057843b9776 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionResponseSerializingTests.java @@ -14,25 +14,24 @@ import java.util.List; -public class GeoIpDownloaderStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase< - GeoIpDownloaderStatsAction.Response> { +public class GeoIpStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase { @Override - protected Writeable.Reader instanceReader() { - return GeoIpDownloaderStatsAction.Response::new; + protected Writeable.Reader instanceReader() { + return GeoIpStatsAction.Response::new; } @Override - protected GeoIpDownloaderStatsAction.Response createTestInstance() { - List nodeResponses = randomList( + protected GeoIpStatsAction.Response createTestInstance() { + List nodeResponses = randomList( 10, - GeoIpDownloaderStatsActionNodeResponseSerializingTests::createRandomInstance + GeoIpStatsActionNodeResponseSerializingTests::createRandomInstance ); - return new GeoIpDownloaderStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); + return new GeoIpStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); } @Override - protected GeoIpDownloaderStatsAction.Response mutateInstance(GeoIpDownloaderStatsAction.Response instance) { + protected GeoIpStatsAction.Response mutateInstance(GeoIpStatsAction.Response instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } } From a9cab350e0263a6a5f59ea4dd7974c3b8eb6b51d Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Wed, 10 Apr 2024 10:40:13 -0400 Subject: [PATCH 133/173] Log skipped elections due to shutdown marker (#106701) --- .../cluster/SpecificMasterNodesIT.java | 4 --- .../cluster/coordination/Coordinator.java | 36 +++++++++++++------ .../coordination/ElectionStrategy.java | 20 +++++++++-- .../AbstractCoordinatorTestCase.java | 22 +++++++----- 4 files changed, 57 insertions(+), 25 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 43506647f89ba..cd0bf5c428118 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -103,10 +103,6 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) ); - assertThat( 
- internalCluster().nonMasterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), - equalTo(masterNodeName) - ); assertThat( internalCluster().masterClient().admin().cluster().prepareState().get().getState().nodes().getMasterNode().getName(), equalTo(masterNodeName) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index fc7eaa97c677b..156ba88a7d2b1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; +import org.elasticsearch.cluster.coordination.ElectionStrategy.NodeEligibility; import org.elasticsearch.cluster.coordination.FollowersChecker.FollowerCheckRequest; import org.elasticsearch.cluster.coordination.JoinHelper.InitialJoinAccumulator; import org.elasticsearch.cluster.metadata.Metadata; @@ -544,8 +545,14 @@ private void startElection() { // The preVoteCollector is only active while we are candidate, but it does not call this method with synchronisation, so we have // to check our mode again here. if (mode == Mode.CANDIDATE) { - if (localNodeMayWinElection(getLastAcceptedState(), electionStrategy) == false) { - logger.trace("skip election as local node may not win it: {}", getLastAcceptedState().coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip election as local node may not win it ({}): {}", + nodeEligibility.reason(), + getLastAcceptedState().coordinationMetadata() + ); return; } @@ -598,7 +605,7 @@ private void abdicateTo(DiscoveryNode newMaster) { becomeCandidate("after abdicating to " + newMaster); } - private static boolean localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { + private static NodeEligibility localNodeMayWinElection(ClusterState lastAcceptedState, ElectionStrategy electionStrategy) { final DiscoveryNode localNode = lastAcceptedState.nodes().getLocalNode(); assert localNode != null; return electionStrategy.nodeMayWinElection(lastAcceptedState, localNode); @@ -1283,8 +1290,12 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura metadataBuilder.coordinationMetadata(coordinationMetadata); coordinationState.get().setInitialState(ClusterState.builder(currentState).metadata(metadataBuilder).build()); - assert localNodeMayWinElection(getLastAcceptedState(), electionStrategy) - : "initial state does not allow local node to win election: " + getLastAcceptedState().coordinationMetadata(); + var nodeEligibility = localNodeMayWinElection(getLastAcceptedState(), electionStrategy); + assert nodeEligibility.mayWin() + : "initial state does not allow local node to win election, reason: " + + nodeEligibility.reason() + + " , metadata: " + + getLastAcceptedState().coordinationMetadata(); preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version startElectionScheduler(); return true; @@ -1767,9 +1778,14 @@ public void run() { 
synchronized (mutex) { if (mode == Mode.CANDIDATE) { final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); - - if (localNodeMayWinElection(lastAcceptedState, electionStrategy) == false) { - logger.trace("skip prevoting as local node may not win election: {}", lastAcceptedState.coordinationMetadata()); + final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); + if (nodeEligibility.mayWin() == false) { + assert nodeEligibility.reason().isEmpty() == false; + logger.trace( + "skip prevoting as local node may not win election ({}): {}", + nodeEligibility.reason(), + lastAcceptedState.coordinationMetadata() + ); return; } @@ -1983,10 +1999,10 @@ public void onResponse(Void ignored) { // if necessary, abdicate to another node or improve the voting configuration boolean attemptReconfiguration = true; final ClusterState state = getLastAcceptedState(); // committed state - if (localNodeMayWinElection(state, electionStrategy) == false) { + if (localNodeMayWinElection(state, electionStrategy).mayWin() == false) { final List masterCandidates = completedNodes().stream() .filter(DiscoveryNode::isMasterNode) - .filter(node -> electionStrategy.nodeMayWinElection(state, node)) + .filter(node -> electionStrategy.nodeMayWinElection(state, node).mayWin()) .filter(node -> { // check if master candidate would be able to get an election quorum if we were to // abdicate to it. Assume that every node that completed the publication can provide diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java index c98d9ec39e0f0..2bf6e10a9855a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ElectionStrategy.java @@ -34,6 +34,17 @@ protected boolean satisfiesAdditionalQuorumConstraints( } }; + /** + * Contains a result for whether a node may win an election and the reason if not. 
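+     *
+     * <p>A minimal usage sketch (a hypothetical caller mirroring the call sites in
+     * {@code Coordinator}; the surrounding {@code electionStrategy}, {@code lastAcceptedState},
+     * {@code localNode} and {@code logger} names are assumed for illustration):
+     * <pre>{@code
+     * NodeEligibility eligibility = electionStrategy.nodeMayWinElection(lastAcceptedState, localNode);
+     * if (eligibility.mayWin() == false) {
+     *     logger.trace("skipping election ({})", eligibility.reason());
+     * }
+     * }</pre>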
+ */ + public record NodeEligibility(boolean mayWin, String reason) {} + + public static final NodeEligibility NODE_MAY_WIN_ELECTION = new NodeEligibility(true, ""); + public static final NodeEligibility NODE_MAY_NOT_WIN_ELECTION = new NodeEligibility( + false, + "node is ineligible for election, not a voting node in the voting configuration" + ); + /** * Whether there is an election quorum from the point of view of the given local node under the provided voting configurations */ @@ -105,10 +116,13 @@ public void beforeCommit(long term, long version, ActionListener listener) listener.onResponse(null); } - public boolean nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { + public NodeEligibility nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { final String nodeId = node.getId(); - return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) + if (lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) || lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(nodeId) - || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId)); + || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId))) { + return NODE_MAY_WIN_ELECTION; + } + return NODE_MAY_NOT_WIN_ELECTION; } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 4e43cb33111a1..cb70ab8e491cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -288,7 +288,7 @@ public class Cluster implements Releasable { @Nullable // null means construct a list from all the current nodes private List seedHostsList; - Cluster(int initialNodeCount) { + public Cluster(int initialNodeCount) { this(initialNodeCount, true, Settings.EMPTY); } @@ -364,7 +364,13 @@ List addNodes(int newNodesCount) { return addedNodes; } - int size() { + public static void becomeCandidate(ClusterNode node, String reason) { + synchronized (node.coordinator.mutex) { + node.coordinator.becomeCandidate(reason); + } + } + + public int size() { return clusterNodes.size(); } @@ -760,7 +766,7 @@ private void stabilise(long stabilisationDurationMillis, boolean expectIdleJoinV } } - void bootstrapIfNecessary() { + public void bootstrapIfNecessary() { if (clusterNodes.stream().allMatch(ClusterNode::isNotUsefullyBootstrapped)) { assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty()); assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty()); @@ -773,7 +779,7 @@ void bootstrapIfNecessary() { } } - void runFor(long runDurationMillis, String description) { + public void runFor(long runDurationMillis, String description) { final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + runDurationMillis; logger.info("--> runFor({}ms) running until [{}ms]: {}", runDurationMillis, endTime, description); @@ -856,7 +862,7 @@ ClusterNode getAnyNode() { return getAnyNodeExcept(); } - ClusterNode getAnyNodeExcept(ClusterNode... clusterNodesToExclude) { + public ClusterNode getAnyNodeExcept(ClusterNode... 
clusterNodesToExclude) { List filteredNodes = getAllNodesExcept(clusterNodesToExclude); assert filteredNodes.isEmpty() == false; return randomFrom(filteredNodes); @@ -956,7 +962,7 @@ public final class ClusterNode { private static final Logger logger = LogManager.getLogger(ClusterNode.class); private final int nodeIndex; - Coordinator coordinator; + public Coordinator coordinator; private final DiscoveryNode localNode; final CoordinationState.PersistedState persistedState; final Settings nodeSettings; @@ -1388,7 +1394,7 @@ public void onFailure(Exception e) { }); } - AckCollector submitUpdateTask( + public AckCollector submitUpdateTask( String source, UnaryOperator clusterStateUpdate, CoordinatorTestClusterStateUpdateTask taskListener @@ -1460,7 +1466,7 @@ void onDisconnectEventFrom(ClusterNode clusterNode) { transportService.disconnectFromNode(clusterNode.localNode); } - ClusterState getLastAppliedClusterState() { + public ClusterState getLastAppliedClusterState() { return clusterApplierService.state(); } From c57dd98ef483e3e87fb01d3ca6542f86a30298ad Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 16:44:46 +0200 Subject: [PATCH 134/173] semantic_text: Add index metadata information for inference field mappers (#107147) Co-authored-by: @jimczi Co-authored-by: @Mikep86 --- .../cluster/ClusterStateDiffIT.java | 6 +- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/metadata/IndexMetadata.java | 70 ++++++++++ .../metadata/InferenceFieldMetadata.java | 132 ++++++++++++++++++ .../metadata/MetadataCreateIndexService.java | 9 +- .../metadata/MetadataMappingService.java | 7 +- .../index/mapper/InferenceFieldMapper.java | 27 ++++ .../index/mapper/MappingLookup.java | 31 +++- .../cluster/metadata/IndexMetadataTests.java | 33 ++++- .../metadata/InferenceFieldMetadataTests.java | 72 ++++++++++ .../metadata/DataStreamTestHelper.java | 1 + 11 files changed, 373 insertions(+), 16 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 409fbdd70333e..e0dbc74567053 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -61,6 +61,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.metadata.AliasMetadata.newAliasMetadataBuilder; +import static org.elasticsearch.cluster.metadata.IndexMetadataTests.randomInferenceFields; import static org.elasticsearch.cluster.routing.RandomShardRoutingMutator.randomChange; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo; @@ -571,7 +572,7 @@ public IndexMetadata randomCreate(String name) { @Override public IndexMetadata randomChange(IndexMetadata part) { IndexMetadata.Builder builder = IndexMetadata.builder(part); - switch (randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 3)) { case 0: 
builder.settings(Settings.builder().put(part.getSettings()).put(randomSettings(Settings.EMPTY))); break; @@ -585,6 +586,9 @@ public IndexMetadata randomChange(IndexMetadata part) { case 2: builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)); break; + case 3: + builder.putInferenceFields(randomInferenceFields()); + break; default: throw new IllegalArgumentException("Shouldn't be here"); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 8589e183a150e..9d21e9fe5d794 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -166,6 +166,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0); public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0); public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0); + public static final TransportVersion INFERENCE_FIELDS_METADATA = def(8_628_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 22672756bdaf0..529814e83ba38 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -540,6 +540,8 @@ public Iterator> settings() { public static final String KEY_SHARD_SIZE_FORECAST = "shard_size_forecast"; + public static final String KEY_INFERENCE_FIELDS = "field_inference"; + public static final String INDEX_STATE_FILE_PREFIX = "state-"; static final TransportVersion SYSTEM_INDEX_FLAG_ADDED = TransportVersions.V_7_10_0; @@ -574,6 +576,8 @@ public Iterator> settings() { @Nullable private final MappingMetadata mapping; + private final ImmutableOpenMap inferenceFields; + private final ImmutableOpenMap customData; private final Map> inSyncAllocationIds; @@ -642,6 +646,7 @@ private IndexMetadata( final int numberOfReplicas, final Settings settings, final MappingMetadata mapping, + final ImmutableOpenMap inferenceFields, final ImmutableOpenMap aliases, final ImmutableOpenMap customData, final Map> inSyncAllocationIds, @@ -692,6 +697,7 @@ private IndexMetadata( this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); this.settings = settings; this.mapping = mapping; + this.inferenceFields = inferenceFields; this.customData = customData; this.aliases = aliases; this.inSyncAllocationIds = inSyncAllocationIds; @@ -748,6 +754,7 @@ IndexMetadata withMappingMetadata(MappingMetadata mapping) { this.numberOfReplicas, this.settings, mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -806,6 +813,7 @@ public IndexMetadata withInSyncAllocationIds(int shardId, Set inSyncSet) this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, Maps.copyMapWithAddedOrReplacedEntry(this.inSyncAllocationIds, shardId, Set.copyOf(inSyncSet)), @@ -862,6 +870,7 @@ public IndexMetadata withIncrementedPrimaryTerm(int shardId) { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -918,6 +927,7 @@ public IndexMetadata withTimestampRange(IndexLongFieldRange timestampRange) { 
this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -970,6 +980,7 @@ public IndexMetadata withIncrementedVersion() { this.numberOfReplicas, this.settings, this.mapping, + this.inferenceFields, this.aliases, this.customData, this.inSyncAllocationIds, @@ -1193,6 +1204,10 @@ public MappingMetadata mapping() { return mapping; } + public Map getInferenceFields() { + return inferenceFields; + } + @Nullable public IndexMetadataStats getStats() { return stats; @@ -1403,6 +1418,9 @@ public boolean equals(Object o) { if (rolloverInfos.equals(that.rolloverInfos) == false) { return false; } + if (inferenceFields.equals(that.inferenceFields) == false) { + return false; + } if (isSystem != that.isSystem) { return false; } @@ -1423,6 +1441,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); result = 31 * result + rolloverInfos.hashCode(); + result = 31 * result + inferenceFields.hashCode(); result = 31 * result + Boolean.hashCode(isSystem); return result; } @@ -1469,6 +1488,7 @@ private static class IndexMetadataDiff implements Diff { @Nullable private final Diff settingsDiff; private final Diff> mappings; + private final Diff> inferenceFields; private final Diff> aliases; private final Diff> customData; private final Diff>> inSyncAllocationIds; @@ -1500,6 +1520,7 @@ private static class IndexMetadataDiff implements Diff { : ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, after.mapping).build(), DiffableUtils.getStringKeySerializer() ); + inferenceFields = DiffableUtils.diff(before.inferenceFields, after.inferenceFields, DiffableUtils.getStringKeySerializer()); aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); customData = DiffableUtils.diff(before.customData, after.customData, DiffableUtils.getStringKeySerializer()); inSyncAllocationIds = DiffableUtils.diff( @@ -1524,6 +1545,8 @@ private static class IndexMetadataDiff implements Diff { new DiffableUtils.DiffableValueReader<>(DiffableStringMap::readFrom, DiffableStringMap::readDiffFrom); private static final DiffableUtils.DiffableValueReader ROLLOVER_INFO_DIFF_VALUE_READER = new DiffableUtils.DiffableValueReader<>(RolloverInfo::new, RolloverInfo::readDiffFrom); + private static final DiffableUtils.DiffableValueReader INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER = + new DiffableUtils.DiffableValueReader<>(InferenceFieldMetadata::new, InferenceFieldMetadata::readDiffFrom); IndexMetadataDiff(StreamInput in) throws IOException { index = in.readString(); @@ -1546,6 +1569,15 @@ private static class IndexMetadataDiff implements Diff { } primaryTerms = in.readVLongArray(); mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MAPPING_DIFF_VALUE_READER); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields = DiffableUtils.readImmutableOpenMapDiff( + in, + DiffableUtils.getStringKeySerializer(), + INFERENCE_FIELDS_METADATA_DIFF_VALUE_READER + ); + } else { + inferenceFields = DiffableUtils.emptyDiff(); + } aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), ALIAS_METADATA_DIFF_VALUE_READER); customData = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_DIFF_VALUE_READER); inSyncAllocationIds = DiffableUtils.readJdkMapDiff( @@ -1595,6 +1627,9 @@ public void 
writeTo(StreamOutput out) throws IOException { } out.writeVLongArray(primaryTerms); mappings.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + inferenceFields.writeTo(out); + } aliases.writeTo(out); customData.writeTo(out); inSyncAllocationIds.writeTo(out); @@ -1628,6 +1663,7 @@ public IndexMetadata apply(IndexMetadata part) { builder.mapping = mappings.apply( ImmutableOpenMap.builder(1).fPut(MapperService.SINGLE_MAPPING_NAME, part.mapping).build() ).get(MapperService.SINGLE_MAPPING_NAME); + builder.inferenceFields.putAllFromMap(inferenceFields.apply(part.inferenceFields)); builder.aliases.putAllFromMap(aliases.apply(part.aliases)); builder.customMetadata.putAllFromMap(customData.apply(part.customData)); builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds)); @@ -1673,6 +1709,10 @@ public static IndexMetadata readFrom(StreamInput in, @Nullable Function builder.putInferenceField(f)); + } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { AliasMetadata aliasMd = new AliasMetadata(in); @@ -1733,6 +1773,9 @@ public void writeTo(StreamOutput out, boolean mappingsAsHash) throws IOException mapping.writeTo(out); } } + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_FIELDS_METADATA)) { + out.writeCollection(inferenceFields.values()); + } out.writeCollection(aliases.values()); out.writeMap(customData, StreamOutput::writeWriteable); out.writeMap( @@ -1788,6 +1831,7 @@ public static class Builder { private long[] primaryTerms = null; private Settings settings = Settings.EMPTY; private MappingMetadata mapping; + private final ImmutableOpenMap.Builder inferenceFields; private final ImmutableOpenMap.Builder aliases; private final ImmutableOpenMap.Builder customMetadata; private final Map> inSyncAllocationIds; @@ -1802,6 +1846,7 @@ public static class Builder { public Builder(String index) { this.index = index; + this.inferenceFields = ImmutableOpenMap.builder(); this.aliases = ImmutableOpenMap.builder(); this.customMetadata = ImmutableOpenMap.builder(); this.inSyncAllocationIds = new HashMap<>(); @@ -1819,6 +1864,7 @@ public Builder(IndexMetadata indexMetadata) { this.settings = indexMetadata.getSettings(); this.primaryTerms = indexMetadata.primaryTerms.clone(); this.mapping = indexMetadata.mapping; + this.inferenceFields = ImmutableOpenMap.builder(indexMetadata.inferenceFields); this.aliases = ImmutableOpenMap.builder(indexMetadata.aliases); this.customMetadata = ImmutableOpenMap.builder(indexMetadata.customData); this.routingNumShards = indexMetadata.routingNumShards; @@ -2059,6 +2105,16 @@ public Builder shardSizeInBytesForecast(Long shardSizeInBytesForecast) { return this; } + public Builder putInferenceField(InferenceFieldMetadata value) { + this.inferenceFields.put(value.getName(), value); + return this; + } + + public Builder putInferenceFields(Map values) { + this.inferenceFields.putAllFromMap(values); + return this; + } + public IndexMetadata build() { return build(false); } @@ -2221,6 +2277,7 @@ IndexMetadata build(boolean repair) { numberOfReplicas, settings, mapping, + inferenceFields.build(), aliasesMap, newCustomMetadata, Map.ofEntries(denseInSyncAllocationIds), @@ -2379,6 +2436,14 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build builder.field(KEY_SHARD_SIZE_FORECAST, indexMetadata.shardSizeInBytesForecast); } + if (indexMetadata.getInferenceFields().isEmpty() == false) { + builder.startObject(KEY_INFERENCE_FIELDS); + for 
(InferenceFieldMetadata field : indexMetadata.getInferenceFields().values()) { + field.toXContent(builder, params); + } + builder.endObject(); + } + builder.endObject(); } @@ -2456,6 +2521,11 @@ public static IndexMetadata fromXContent(XContentParser parser, Map, ToXContentFragment { + private static final String INFERENCE_ID_FIELD = "inference_id"; + private static final String SOURCE_FIELDS_FIELD = "source_fields"; + + private final String name; + private final String inferenceId; + private final String[] sourceFields; + + public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields) { + this.name = Objects.requireNonNull(name); + this.inferenceId = Objects.requireNonNull(inferenceId); + this.sourceFields = Objects.requireNonNull(sourceFields); + } + + public InferenceFieldMetadata(StreamInput input) throws IOException { + this.name = input.readString(); + this.inferenceId = input.readString(); + this.sourceFields = input.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(inferenceId); + out.writeStringArray(sourceFields); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InferenceFieldMetadata that = (InferenceFieldMetadata) o; + return Objects.equals(name, that.name) + && Objects.equals(inferenceId, that.inferenceId) + && Arrays.equals(sourceFields, that.sourceFields); + } + + @Override + public int hashCode() { + int result = Objects.hash(name, inferenceId); + result = 31 * result + Arrays.hashCode(sourceFields); + return result; + } + + public String getName() { + return name; + } + + public String getInferenceId() { + return inferenceId; + } + + public String[] getSourceFields() { + return sourceFields; + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return SimpleDiffable.readDiffFrom(InferenceFieldMetadata::new, in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(name); + builder.field(INFERENCE_ID_FIELD, inferenceId); + builder.array(SOURCE_FIELDS_FIELD, sourceFields); + return builder.endObject(); + } + + public static InferenceFieldMetadata fromXContent(XContentParser parser) throws IOException { + final String name = parser.currentName(); + + XContentParser.Token token = parser.nextToken(); + Objects.requireNonNull(token, "Expected InferenceFieldMetadata but got EOF"); + + String currentFieldName = null; + String inferenceId = null; + List inputFields = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.VALUE_STRING) { + if (INFERENCE_ID_FIELD.equals(currentFieldName)) { + inferenceId = parser.text(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SOURCE_FIELDS_FIELD.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + inputFields.add(parser.text()); + } else { + parser.skipChildren(); + } + } + } + } else { + parser.skipChildren(); + } + } + return new InferenceFieldMetadata(name, inferenceId, inputFields.toArray(String[]::new)); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
index da24f0b9d0dc5..52642e1de8ac9 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java
@@ -1263,10 +1263,11 @@ static IndexMetadata buildIndexMetadata(
         indexMetadataBuilder.system(isSystem);
         // now, update the mappings with the actual source
         Map<String, MappingMetadata> mappingsMetadata = new HashMap<>();
-        DocumentMapper mapper = documentMapperSupplier.get();
-        if (mapper != null) {
-            MappingMetadata mappingMd = new MappingMetadata(mapper);
-            mappingsMetadata.put(mapper.type(), mappingMd);
+        DocumentMapper docMapper = documentMapperSupplier.get();
+        if (docMapper != null) {
+            MappingMetadata mappingMd = new MappingMetadata(docMapper);
+            mappingsMetadata.put(docMapper.type(), mappingMd);
+            indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields());
         }
 
         for (MappingMetadata mappingMd : mappingsMetadata.values()) {
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
index 3ca206eaddb28..4e714b96f64c7 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java
@@ -193,9 +193,10 @@ private static ClusterState applyRequest(
                 IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexMetadata);
                 // Mapping updates on a single type may have side-effects on other types so we need to
                 // update mapping metadata on all types
-                DocumentMapper mapper = mapperService.documentMapper();
-                if (mapper != null) {
-                    indexMetadataBuilder.putMapping(new MappingMetadata(mapper));
+                DocumentMapper docMapper = mapperService.documentMapper();
+                if (docMapper != null) {
+                    indexMetadataBuilder.putMapping(new MappingMetadata(docMapper));
+                    indexMetadataBuilder.putInferenceFields(docMapper.mappers().inferenceFields());
                 }
                 if (updatedMapping) {
                     indexMetadataBuilder.mappingVersion(1 + indexMetadataBuilder.mappingVersion());
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java
new file mode 100644
index 0000000000000..2b0833c72021b
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceFieldMapper.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.cluster.metadata.InferenceFieldMetadata;
+import org.elasticsearch.inference.InferenceService;
+
+import java.util.Set;
+
+/**
+ * Field mapper whose input must be transformed through the {@link InferenceService} before it is indexed.
+ */
+public interface InferenceFieldMapper {
+
+    /**
+     * Retrieves the inference metadata associated with this mapper.
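+     *
+     * <p>A minimal sketch of an implementation (the endpoint id below is an illustrative
+     * assumption, not part of this change):
+     * <pre>{@code
+     * public InferenceFieldMetadata getMetadata(Set<String> sourcePaths) {
+     *     // name() is the full path of this field; sourcePaths are the fields feeding its input
+     *     return new InferenceFieldMetadata(name(), "my-inference-endpoint", sourcePaths.toArray(String[]::new));
+     * }
+     * }</pre>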
+     *
+     * @param sourcePaths The source paths that populate the input for the field (before inference)
+     */
+    InferenceFieldMetadata getMetadata(Set<String> sourcePaths);
+}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java
index 673593cc6e240..bf879f30e5a29 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java
@@ -10,9 +10,11 @@
 import org.apache.lucene.codecs.PostingsFormat;
 import org.elasticsearch.cluster.metadata.DataStream;
+import org.elasticsearch.cluster.metadata.InferenceFieldMetadata;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.inference.InferenceService;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -47,6 +49,7 @@ private CacheKey() {}
     /** Full field name to mapper */
     private final Map<String, Mapper> fieldMappers;
     private final Map<String, ObjectMapper> objectMappers;
+    private final Map<String, InferenceFieldMetadata> inferenceFields;
     private final int runtimeFieldMappersCount;
     private final NestedLookup nestedLookup;
     private final FieldTypeLookup fieldTypeLookup;
@@ -84,12 +87,12 @@ private static void collect(
         Collection<FieldMapper> fieldMappers,
         Collection<FieldAliasMapper> fieldAliasMappers
     ) {
-        if (mapper instanceof ObjectMapper) {
-            objectMappers.add((ObjectMapper) mapper);
-        } else if (mapper instanceof FieldMapper) {
-            fieldMappers.add((FieldMapper) mapper);
-        } else if (mapper instanceof FieldAliasMapper) {
-            fieldAliasMappers.add((FieldAliasMapper) mapper);
+        if (mapper instanceof ObjectMapper objectMapper) {
+            objectMappers.add(objectMapper);
+        } else if (mapper instanceof FieldMapper fieldMapper) {
+            fieldMappers.add(fieldMapper);
+        } else if (mapper instanceof FieldAliasMapper fieldAliasMapper) {
+            fieldAliasMappers.add(fieldAliasMapper);
         } else {
             throw new IllegalStateException("Unrecognized mapper type [" + mapper.getClass().getSimpleName() + "].");
         }
@@ -174,6 +177,15 @@ private MappingLookup(
         final Collection<RuntimeField> runtimeFields = mapping.getRoot().runtimeFields();
 
         this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFields);
+
+        Map<String, InferenceFieldMetadata> inferenceFields = new HashMap<>();
+        for (FieldMapper mapper : mappers) {
+            if (mapper instanceof InferenceFieldMapper inferenceFieldMapper) {
+                inferenceFields.put(mapper.name(), inferenceFieldMapper.getMetadata(fieldTypeLookup.sourcePaths(mapper.name())));
+            }
+        }
+        this.inferenceFields = Map.copyOf(inferenceFields);
+
         if (runtimeFields.isEmpty()) {
             // without runtime fields this is the same as the field type lookup
             this.indexTimeLookup = fieldTypeLookup;
@@ -360,6 +372,13 @@ public Map<String, ObjectMapper> objectMappers() {
         return objectMappers;
     }
 
+    /**
+     * Returns a map containing all fields that require running inference (through the {@link InferenceService}) prior to indexing.
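+     *
+     * <p>A brief usage sketch (hypothetical caller; the {@code mappingLookup} variable is assumed):
+     * <pre>{@code
+     * for (InferenceFieldMetadata metadata : mappingLookup.inferenceFields().values()) {
+     *     String inferenceId = metadata.getInferenceId();  // the inference endpoint to call
+     *     String[] sources = metadata.getSourceFields();   // the source fields feeding its input
+     * }
+     * }</pre>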
+ */ + public Map inferenceFields() { + return inferenceFields; + } + public NestedLookup nestedLookup() { return nestedLookup; } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index 5cc1a7206e7e4..116acf938fcbc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -83,6 +83,8 @@ public void testIndexMetadataSerialization() throws IOException { IndexMetadataStats indexStats = randomBoolean() ? randomIndexStats(numShard) : null; Double indexWriteLoadForecast = randomBoolean() ? randomDoubleBetween(0.0, 128, true) : null; Long shardSizeInBytesForecast = randomBoolean() ? randomLongBetween(1024, 10240) : null; + Map inferenceFields = randomInferenceFields(); + IndexMetadata metadata = IndexMetadata.builder("foo") .settings(indexSettings(numShard, numberOfReplicas).put("index.version.created", 1)) .creationDate(randomLong()) @@ -107,6 +109,7 @@ public void testIndexMetadataSerialization() throws IOException { .stats(indexStats) .indexWriteLoadForecast(indexWriteLoadForecast) .shardSizeInBytesForecast(shardSizeInBytesForecast) + .putInferenceFields(inferenceFields) .build(); assertEquals(system, metadata.isSystem()); @@ -141,6 +144,7 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getStats(), fromXContentMeta.getStats()); assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), fromXContentMeta.getInferenceFields()); final BytesStreamOutput out = new BytesStreamOutput(); metadata.writeTo(out); @@ -162,8 +166,9 @@ public void testIndexMetadataSerialization() throws IOException { assertEquals(metadata.getCustomData(), deserialized.getCustomData()); assertEquals(metadata.isSystem(), deserialized.isSystem()); assertEquals(metadata.getStats(), deserialized.getStats()); - assertEquals(metadata.getForecastedWriteLoad(), fromXContentMeta.getForecastedWriteLoad()); - assertEquals(metadata.getForecastedShardSizeInBytes(), fromXContentMeta.getForecastedShardSizeInBytes()); + assertEquals(metadata.getForecastedWriteLoad(), deserialized.getForecastedWriteLoad()); + assertEquals(metadata.getForecastedShardSizeInBytes(), deserialized.getForecastedShardSizeInBytes()); + assertEquals(metadata.getInferenceFields(), deserialized.getInferenceFields()); } } @@ -547,10 +552,34 @@ public void testPartialIndexReceivesDataFrozenTierPreference() { } } + public void testInferenceFieldMetadata() { + Settings.Builder settings = indexSettings(IndexVersion.current(), randomIntBetween(1, 8), 0); + IndexMetadata idxMeta1 = IndexMetadata.builder("test").settings(settings).build(); + assertTrue(idxMeta1.getInferenceFields().isEmpty()); + + Map dynamicFields = randomInferenceFields(); + IndexMetadata idxMeta2 = IndexMetadata.builder(idxMeta1).putInferenceFields(dynamicFields).build(); + assertThat(idxMeta2.getInferenceFields(), equalTo(dynamicFields)); + } + private static Settings indexSettingsWithDataTier(String dataTier) { return indexSettings(IndexVersion.current(), 1, 0).put(DataTier.TIER_PREFERENCE, dataTier).build(); } + public static Map randomInferenceFields() { + Map map = new HashMap<>(); + int numFields = randomIntBetween(0, 5); + for (int i 
= 0; i < numFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + map.put(field, randomInferenceFieldMetadata(field)); + } + return map; + } + + private static InferenceFieldMetadata randomInferenceFieldMetadata(String name) { + return new InferenceFieldMetadata(name, randomIdentifier(), randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)); + } + private IndexMetadataStats randomIndexStats(int numberOfShards) { IndexWriteLoad.Builder indexWriteLoadBuilder = IndexWriteLoad.builder(numberOfShards); int numberOfPopulatedWriteLoads = randomIntBetween(0, numberOfShards); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java new file mode 100644 index 0000000000000..bd4c87be51157 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadataTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.equalTo; + +public class InferenceFieldMetadataTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + final InferenceFieldMetadata before = createTestItem(); + final BytesStreamOutput out = new BytesStreamOutput(); + before.writeTo(out); + + final StreamInput in = out.bytes().streamInput(); + final InferenceFieldMetadata after = new InferenceFieldMetadata(in); + + assertThat(after, equalTo(before)); + } + + @Override + protected InferenceFieldMetadata createTestInstance() { + return createTestItem(); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return p -> p.equals(""); // do not add elements at the top-level as any element at this level is parsed as a new inference field + } + + @Override + protected InferenceFieldMetadata doParseInstance(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + parser.nextToken(); + } + assertEquals(XContentParser.Token.FIELD_NAME, parser.currentToken()); + InferenceFieldMetadata inferenceMetadata = InferenceFieldMetadata.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return inferenceMetadata; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + private static InferenceFieldMetadata createTestItem() { + String name = randomAlphaOfLengthBetween(3, 10); + String inferenceId = randomIdentifier(); + String[] inputFields = generateRandomStringArray(5, 10, false, false); + return new InferenceFieldMetadata(name, inferenceId, inputFields); + } + + public void testNullCtorArgsThrowException() { + assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", new String[0])); + assertThrows(NullPointerException.class, () -> new 
InferenceFieldMetadata("name", null, new String[0]));
+        assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null));
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
index c83caa617e16e..e2b03c6b81af3 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java
@@ -729,6 +729,7 @@ public static IndicesService mockIndicesServices(MappingLookup mappingLookup) th
         Mapping mapping = new Mapping(root, new MetadataFieldMapper[0], null);
         DocumentMapper documentMapper = mock(DocumentMapper.class);
         when(documentMapper.mapping()).thenReturn(mapping);
+        when(documentMapper.mappers()).thenReturn(MappingLookup.EMPTY);
         when(documentMapper.mappingSource()).thenReturn(mapping.toCompressedXContent());
         RoutingFieldMapper routingFieldMapper = mock(RoutingFieldMapper.class);
         when(routingFieldMapper.required()).thenReturn(false);

From 9e502aa4a082fb7f7bc1bfdb693efdf525fc5959 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Wed, 10 Apr 2024 10:49:10 -0400
Subject: [PATCH 135/173] Expanding and refactoring the vector rolling upgrade
 tests (#107020)

This commit moves the legacy YAML rolling upgrade tests for vectors into
the new rolling upgrade package. Also, it adds rolling upgrade tests for
`int8_hnsw`.
---
 qa/rolling-upgrade-legacy/build.gradle        |  22 +-
 .../test/mixed_cluster/30_vector_search.yml   | 144 -------
 .../test/old_cluster/30_vector_search.yml     | 236 -----------
 .../upgraded_cluster/30_vector_search.yml     | 148 -------
 .../upgrades/VectorSearchIT.java              | 389 ++++++++++++++++++
 5 files changed, 390 insertions(+), 549 deletions(-)
 delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml
 delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml
 delete mode 100644 qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml
 create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java

diff --git a/qa/rolling-upgrade-legacy/build.gradle b/qa/rolling-upgrade-legacy/build.gradle
index e48d05f98b20a..77dfc9724ce8f 100644
--- a/qa/rolling-upgrade-legacy/build.gradle
+++ b/qa/rolling-upgrade-legacy/build.gradle
@@ -7,8 +7,8 @@
  */
 
+
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.BwcVersions
 import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 
@@ -59,11 +59,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
      systemProperty 'tests.upgrade_from_version', oldVersion
      nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(",")))
      nonInputProperties.systemProperty('tests.clustername', baseName)
-     if (bwcVersion.before("8.4.0")) {
-       excludeList.addAll(["old_cluster/30_vector_search/*"])
-     } else if (bwcVersion.before("8.6.0")) {
-       excludeList.addAll(["old_cluster/30_vector_search/Create indexed byte vectors and search"])
-     }
      if (excludeList.isEmpty() == false) {
        systemProperty 'tests.rest.blacklist', excludeList.join(',')
      }
@@ -81,11 +76,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
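 // The version gates deleted in these hunks are re-expressed in Java inside the new
 // VectorSearchIT (created above), which also adds upgrade coverage for the quantized
 // "int8_hnsw" dense_vector index_options type.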
nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,11 +93,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["mixed_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -124,11 +109,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) def excludeList = [] - if (bwcVersion.before("8.4.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/*"]) - } else if (bwcVersion.before("8.6.0")) { - excludeList.addAll(["upgraded_cluster/30_vector_search/Search byte indices created in old cluster"]) - } if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml deleted file mode 100644 index 108f58b29bf27..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { 
hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } - - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } - - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml deleted file mode 100644 index 96b950e5ae927..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ /dev/null @@ -1,236 +0,0 @@ ---- -"Create indexed float vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-float-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - dims: 3 - knn: - type: dense_vector - dims: 3 - index: true - similarity: l2_norm - index_options: - type: hnsw - m: 16 - ef_construction: 100 - - do: - bulk: - index: test-float-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-float-index - max_num_segments: 1 - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: 
{ hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - ---- -"Create indexed byte vectors and search": - - skip: - features: close_to - - do: - indices.create: - index: test-byte-index - body: - settings: - number_of_shards: "1" - mappings: - properties: - bdv: - type: dense_vector - element_type: byte - dims: 3 - knn: - type: dense_vector - element_type: byte - dims: 3 - index: true - similarity: l2_norm - - do: - bulk: - index: test-byte-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' - - '{"index": {"_id": "2"}}' - - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' - - '{"index": {"_id": "3"}}' - - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' - - '{"index": {"_id": "4"}}' - - '{"knn": [1, 2, 1]}' - - '{"index": {"_id": "5"}}' - - '{"knn": [1, 3, 1]}' - - '{"index": {"_id": "6"}}' - - '{"knn": [2, 1, 1]}' - - '{"index": {"_id": "7"}}' - - '{"knn": [3, 1, 1]}' - - '{"index": {"_id": "missing_vector"}}' - - '{}' - - do: - indices.forcemerge: - index: test-byte-index - max_num_segments: 1 - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: 
{ hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml deleted file mode 100644 index ee2c357594b94..0000000000000 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml +++ /dev/null @@ -1,148 +0,0 @@ ---- -"Search float indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-float-index - body: - knn: - field: "knn" - query_vector: [ 4, 5, 6 ] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - ---- -"Search byte indices created in old cluster": - - skip: - features: close_to - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "bdv" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: bdv - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "2" } - - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - - match: { hits.hits.2._id: "1" } - - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - - - do: - search: - index: test-byte-index - body: - query: - script_score: - query: { "exists": { "field": "knn" } } - script: - source: | - field(params.field).get().dotProduct(params.query) - params: - query: [4, 5, 6] - field: knn - - - match: { hits.hits.0._id: "3" } - - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - - match: { hits.hits.1._id: "5" } - - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - - match: { hits.hits.2._id: "7" } - - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } 
} - - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 6 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } - - - do: - search: - index: test-byte-index - body: - knn: - field: "knn" - query_vector: [4, 5, 6] - k: 3 - num_candidates: 7 - - - match: { hits.hits.0._id: "3" } - - match: { hits.hits.1._id: "5" } - - match: { hits.hits.2._id: "2" } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java new file mode 100644 index 0000000000000..d77910f443d58 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -0,0 +1,389 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; + +public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { + public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + private static final String FLOAT_INDEX_NAME = "float_vector_index"; + private static final String SCRIPT_VECTOR_INDEX_NAME = "script_vector_index"; + private static final String SCRIPT_BYTE_INDEX_NAME = "script_byte_vector_index"; + private static final String BYTE_INDEX_NAME = "byte_vector_index"; + private static final String QUANTIZED_INDEX_NAME = "quantized_vector_index"; + private static final String FLOAT_VECTOR_SEARCH_VERSION = "8.4.0"; + private static final String BYTE_VECTOR_SEARCH_VERSION = "8.6.0"; + private static final String QUANTIZED_VECTOR_SEARCH_VERSION = "8.12.1"; + + public void testScriptByteVectorSearch() throws Exception { + assumeTrue("byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": false + } + } + } + """; + createIndex(SCRIPT_BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_BYTE_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = 
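+        // Worked check for the assertions below, using the fixed vectors that indexVectors writes
+        // (seven vectors plus one vector-less doc, so the exists filter matches 7 hits): doc "0"
+        // holds [1, 1, 1], and cosineSimilarity([4, 5, 6], [1, 1, 1]) + 1.0
+        // = 15 / (sqrt(77) * sqrt(3)) + 1.0 ≈ 1.9869276.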
search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testScriptVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + // create index and index 10 random floating point vectors + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": false + } + } + } + """; + createIndex(SCRIPT_VECTOR_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(SCRIPT_VECTOR_INDEX_NAME); + // refresh the index + client().performRequest(new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_refresh")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + SCRIPT_VECTOR_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + } + + public void testFloatVectorSearch() throws Exception { + assumeTrue( + "Float vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(FLOAT_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(FLOAT_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(FLOAT_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + FLOAT_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + FLOAT_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), 
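+        // Worked check for the expected score: under l2_norm similarity, kNN scores a hit as
+        // 1 / (1 + squaredDistance), and doc "2" holds [1, 1, 3], so against [4, 5, 6] the score is
+        // 1 / (1 + 3^2 + 4^2 + 3^2) = 1 / 35 ≈ 0.028571429.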
closeTo(0.028571429, 0.0001)); + } + + public void testByteVectorSearch() throws Exception { + assumeTrue("Byte vector search is not supported on this version", getOldClusterTestVersion().onOrAfter(BYTE_VECTOR_SEARCH_VERSION)); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "element_type": "byte", + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(BYTE_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(BYTE_INDEX_NAME); + // refresh the index + // force merge the index + client().performRequest(new Request("POST", "/" + BYTE_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + // search with a script query + Request searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + BYTE_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("2")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.028571429, 0.0001)); + } + + public void testQuantizedVectorSearch() throws Exception { + assumeTrue( + "Quantized vector search is not supported on this version", + getOldClusterTestVersion().onOrAfter(QUANTIZED_VECTOR_SEARCH_VERSION) + ); + if (isOldCluster()) { + String mapping = """ + { + "properties": { + "vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "cosine", + "index_options": { + "type": "int8_hnsw", + "ef_construction": 100, + "m": 16 + } + } + } + } + """; + // create index and index 10 random floating point vectors + createIndex(QUANTIZED_INDEX_NAME, Settings.EMPTY, mapping); + indexVectors(QUANTIZED_INDEX_NAME); + // force merge the index + client().performRequest(new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_forcemerge?max_num_segments=1")); + } + Request searchRequest = new Request("POST", "/" + QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "query": { + "script_score": { + "query": { + "exists": { + "field": "vector" + } + }, + "script": { + "source": "cosineSimilarity(params.query, 'vector') + 1.0", + "params": { + "query": [4, 5, 6] + } + } + } + } + } + """); + Map response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(7)); + List> hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + + // search with knn + searchRequest = new Request("POST", "/" + 
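+        // Under cosine similarity, kNN normalizes scores as (1 + cosine) / 2, so doc "0" ([1, 1, 1])
+        // should land near (1 + 0.9869276) / 2 ≈ 0.99346; int8 quantization perturbs that only
+        // slightly, which is why the expected 0.9934857 below is asserted with closeTo.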
QUANTIZED_INDEX_NAME + "/_search"); + searchRequest.setJsonEntity(""" + { + "knn": { + "field": "vector", + "query_vector": [4, 5, 6], + "k": 2, + "num_candidates": 5 + } + } + """); + response = search(searchRequest); + assertThat(extractValue(response, "hits.total.value"), equalTo(2)); + hits = extractValue(response, "hits.hits"); + assertThat(hits.get(0).get("_id"), equalTo("0")); + assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.0001)); + } + + private void indexVectors(String indexName) throws Exception { + String[] vectors = new String[] { + "{\"vector\":[1, 1, 1]}", + "{\"vector\":[1, 1, 2]}", + "{\"vector\":[1, 1, 3]}", + "{\"vector\":[1, 2, 1]}", + "{\"vector\":[1, 3, 1]}", + "{\"vector\":[2, 1, 1]}", + "{\"vector\":[3, 1, 1]}", + "{}" }; + for (int i = 0; i < vectors.length; i++) { + Request indexRequest = new Request("PUT", "/" + indexName + "/_doc/" + i); + indexRequest.setJsonEntity(vectors[i]); + assertOK(client().performRequest(indexRequest)); + } + } + + private static Map search(Request request) throws IOException { + final Response response = client().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + @SuppressWarnings("unchecked") + private static T extractValue(Map map, String path) { + return (T) XContentMapValues.extractValue(path, map); + } +} From 92f7e078071fba021f9784c98a7cea6e76c5710d Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 10 Apr 2024 17:07:20 +0200 Subject: [PATCH 136/173] Fix docs plugin unit tests on windows (#107310) --- .../internal/doc/DocSnippetTaskSpec.groovy | 113 +----------------- .../RestTestsFromDocSnippetTaskSpec.groovy | 30 ++--- 2 files changed, 18 insertions(+), 125 deletions(-) diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy index 89939645d0f9c..85ce3c1804474 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -14,6 +14,8 @@ import spock.lang.TempDir import org.gradle.api.InvalidUserDataException import org.gradle.testfixtures.ProjectBuilder +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + class DocSnippetTaskSpec extends Specification { @TempDir @@ -540,9 +542,9 @@ GET /_analyze ) def snippets = task().parseDocFile(tempDir, doc, []) expect: - snippets*.start == [3] - snippets*.language == ["console"] - snippets*.contents == ["""GET /_analyze + snippets[0].start == 3 + snippets[0].language == "console" + normalizeString(snippets[0].contents, tempDir) == """GET /_analyze { "tokenizer": "keyword", "char_filter": [ @@ -556,112 +558,9 @@ GET /_analyze } ], "text": "My license plate is ٢٥٠١٥" -} -"""] +}""" } - def "test parsing snippet from doc2"() { - given: - def doc = docFile( - """ -[role="xpack"] -[[ml-update-snapshot]] -= Update model snapshots API -++++ -Update model snapshots -++++ - -Updates certain properties of a snapshot. - -[[ml-update-snapshot-request]] -== {api-request-title} - -`POST _ml/anomaly_detectors//model_snapshots//_update` - -[[ml-update-snapshot-prereqs]] -== {api-prereq-title} - -Requires the `manage_ml` cluster privilege. This privilege is included in the -`machine_learning_admin` built-in role. 
- -[[ml-update-snapshot-path-parms]] -== {api-path-parms-title} - -``:: -(Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] - -``:: -(Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] - -[[ml-update-snapshot-request-body]] -== {api-request-body-title} - -The following properties can be updated after the model snapshot is created: - -`description`:: -(Optional, string) A description of the model snapshot. - -`retain`:: -(Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] - - -[[ml-update-snapshot-example]] -== {api-examples-title} - -[source,console] --------------------------------------------------- -POST -_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update -{ - "description": "Snapshot 1", - "retain": true -} --------------------------------------------------- -// TEST[skip:todo] - -When the snapshot is updated, you receive the following results: -[source,js] ----- -{ - "acknowledged": true, - "model": { - "job_id": "it_ops_new_logs", - "timestamp": 1491852978000, - "description": "Snapshot 1", -... - "retain": true - } -} ----- -""" - ) - def snippets = task().parseDocFile(tempDir, doc, []) - expect: - snippets*.start == [50, 62] - snippets*.language == ["console", "js"] - snippets*.contents == ["""POST -_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update -{ - "description": "Snapshot 1", - "retain": true -} -""", """{ - "acknowledged": true, - "model": { - "job_id": "it_ops_new_logs", - "timestamp": 1491852978000, - "description": "Snapshot 1", -... - "retain": true - } -} -"""] - } - - File docFile(String docContent) { def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() file.text = docContent diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy index 6ef4726e1578a..6e86cba235886 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -16,6 +16,7 @@ import org.gradle.testfixtures.ProjectBuilder import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString class RestTestsFromDocSnippetTaskSpec extends Specification { @@ -59,9 +60,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { def build = ProjectBuilder.builder().build() given: def task = build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) -// def task = build.tasks.create("restTestFromSnippet", RestTestsFromSnippetsTask) task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] -// docs() task.docs = build.fileTree(new File(tempDir, "docs")) task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); @@ -72,7 +71,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { then: restSpec.exists() - restSpec.text == """--- + normalizeString(restSpec.text, tempDir) == """--- "line_22": - skip: features: @@ -143,11 +142,10 @@ class 
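 // normalizeString (imported above) appears to rewrite platform-specific artifacts such as
 // tempDir absolute paths, Windows backslashes, and line endings into a stable form; note that
 // the expectations below spell regex whitespace as /s+ and drop trailing newlines accordingly.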
RestTestsFromDocSnippetTaskSpec extends Specification { } }, "status": 400 - } -""" + }""" def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") restSpec2.exists() - restSpec2.text == """--- + normalizeString(restSpec2.text, tempDir) == """--- "line_50": - skip: features: @@ -167,11 +165,10 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { "description": "Snapshot 1", "retain": true } - - is_false: _shards.failures -""" + - is_false: _shards.failures""" def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") restSpec3.exists() - restSpec3.text == """--- + normalizeString(restSpec3.text, tempDir) == """--- "line_10": - skip: features: @@ -205,15 +202,13 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { - is_false: _shards.failures - match: \$body: - / \\s+author \\s+\\| \\s+name \\s+\\| \\s+page_count \\s+\\| \\s+release_date\\s* - ---------------\\+---------------\\+---------------\\+------------------------\\s* - Dan \\s+Simmons \\s+\\|Hyperion \\s+\\|482 \\s+\\|1989-05-26T00:00:00.000Z\\s* - Frank \\s+Herbert \\s+\\|Dune \\s+\\|604 \\s+\\|1965-06-01T00:00:00.000Z\\s*/ -""" - + / /s+author /s+/| /s+name /s+/| /s+page_count /s+/| /s+release_date/s* + ---------------/+---------------/+---------------/+------------------------/s* + Dan /s+Simmons /s+/|Hyperion /s+/|482 /s+/|1989-05-26T00:00:00.000Z/s* + Frank /s+Herbert /s+/|Dune /s+/|604 /s+/|1965-06-01T00:00:00.000Z/s*/""" def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") restSpec4.exists() - restSpec4.text == """--- + normalizeString(restSpec4.text, tempDir) == """--- "line_51": - skip: features: @@ -356,8 +351,7 @@ class RestTestsFromDocSnippetTaskSpec extends Specification { "full_name": "Monday Jaffe", "metadata": { "innovation" : 8} } - - is_false: _shards.failures -""" + - is_false: _shards.failures""" } File docFile(String fileName, String docContent) { From a06da4398957876683ab343fd9669dfe9b695512 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:41:21 +0200 Subject: [PATCH 137/173] Update error message in sparse_vector for indices 8.0-8.10 (#107308) --- .../index/mapper/vectors/SparseVectorFieldMapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index 6532abed19044..7b1e20a6cdda3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -44,7 +44,7 @@ public class SparseVectorFieldMapper extends FieldMapper { static final String ERROR_MESSAGE_7X = "[sparse_vector] field type in old 7.x indices is allowed to " + "contain [sparse_vector] fields, but they cannot be indexed or searched."; - static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported from 8.0 to 8.10 versions."; + static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10."; static final IndexVersion PREVIOUS_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.V_8_0_0; static final IndexVersion NEW_SPARSE_VECTOR_INDEX_VERSION = 
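     // These two bounds appear to delimit the rejection window described in the docs change
     // below: indices created from 8.0 up to the NEW_SPARSE_VECTOR version refuse the field
     // type with ERROR_MESSAGE_8X, while older 7.x indices get the softer ERROR_MESSAGE_7X.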
IndexVersions.NEW_SPARSE_VECTOR; From f8e516eb9c5f5b5098593cdd1fce5241d4390773 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 10 Apr 2024 17:41:42 +0200 Subject: [PATCH 138/173] Update sparse_vector docs on index version availability (#107315) --- docs/reference/mapping/types/sparse-vector.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index 17a193eef1d4d..6c7ad6550753e 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -26,6 +26,8 @@ PUT my-index See <> for a complete example on adding documents to a `sparse_vector` mapped field using ELSER. +NOTE: `sparse_vector` fields can not be included in indices that were *created* on {es} versions between 8.0 and 8.10 + NOTE: `sparse_vector` fields only support single-valued fields and strictly positive values. Multi-valued fields and negative values will be rejected. From cb464b6d6a14e83bbfe0eba3d9341030d5d8c179 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Wed, 10 Apr 2024 11:54:46 -0400 Subject: [PATCH 139/173] Bump versions after 7.17.20 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 612838592712b..18a93c9b63a3e 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 58dcf875ce297..c306e1d9f63cb 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -305,8 +305,8 @@ steps: env: BWC_VERSION: 7.16.3 - - label: "{{matrix.image}} / 7.17.20 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.20 + - label: "{{matrix.image}} / 7.17.21 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.21 timeout_in_minutes: 300 matrix: setup: @@ -319,7 +319,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3462e0fb95aba..3410436eda2bf 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -172,8 +172,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.16.3 - - label: 7.17.20 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.20#bwcTest + - label: 7.17.21 / bwc + command: .ci/scripts/run-gradle.sh 
-Dbwc.checkout.align=true v7.17.21#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -181,7 +181,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: 8.0.1 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.1#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index d3e57196e1c89..46165da472e74 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -16,7 +16,7 @@ BWC_VERSION: - "7.14.2" - "7.15.2" - "7.16.3" - - "7.17.20" + - "7.17.21" - "8.0.1" - "8.1.3" - "8.2.3" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index db131b89ffa4e..dfd238a041b1e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - - "7.17.20" + - "7.17.21" - "8.13.3" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 99e811c021845..88a1049a42557 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -120,6 +120,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_18 = new Version(7_17_18_99); public static final Version V_7_17_19 = new Version(7_17_19_99); public static final Version V_7_17_20 = new Version(7_17_20_99); + public static final Version V_7_17_21 = new Version(7_17_21_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 4bae460e3bce2..dbc170828fabc 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -67,6 +67,7 @@ 7.17.17,7171799 7.17.18,7171899 7.17.19,7171999 +7.17.20,7172099 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 61cc2167a9048..f379ac81b9009 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -67,6 +67,7 @@ 7.17.17,7171799 7.17.18,7171899 7.17.19,7171999 +7.17.20,7172099 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 From d915b964ba131073e3c961589ced73f0163699d8 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 10 Apr 2024 17:56:45 +0200 Subject: [PATCH 140/173] Rename ST_CENTROID to ST_CENTROID_AGG (#107226) * Rename ST_CENTROID to ST_CENTROID_AGG In order to allow development of a scalar ST_CENTROID function. 
* Fix table alignment --- .../functions/aggregation-functions.asciidoc | 2 +- ...roid.asciidoc => st_centroid_agg.asciidoc} | 6 +- .../resources/enrich-IT_tests_only.csv-spec | 4 +- .../src/main/resources/meta.csv-spec | 8 +- .../src/main/resources/spatial.csv-spec | 186 +++++++++--------- .../function/EsqlFunctionRegistry.java | 2 +- .../xpack/esql/plugin/EsqlFeatures.java | 4 +- .../optimizer/LogicalPlanOptimizerTests.java | 2 +- .../optimizer/PhysicalPlanOptimizerTests.java | 38 ++-- 9 files changed, 126 insertions(+), 126 deletions(-) rename docs/reference/esql/functions/{st_centroid.asciidoc => st_centroid_agg.asciidoc} (69%) diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index c040e7fe01327..2fdc8582d6bfb 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -29,6 +29,6 @@ include::median.asciidoc[] include::median-absolute-deviation.asciidoc[] include::min.asciidoc[] include::percentile.asciidoc[] -include::st_centroid.asciidoc[] +include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid_agg.asciidoc similarity index 69% rename from docs/reference/esql/functions/st_centroid.asciidoc rename to docs/reference/esql/functions/st_centroid_agg.asciidoc index e91a325a5597b..c980560f8f198 100644 --- a/docs/reference/esql/functions/st_centroid.asciidoc +++ b/docs/reference/esql/functions/st_centroid_agg.asciidoc @@ -1,6 +1,6 @@ [discrete] [[esql-agg-st-centroid]] -=== `ST_CENTROID` +=== `ST_CENTROID_AGG` experimental::[] @@ -8,11 +8,11 @@ Calculate the spatial centroid over a field with spatial point geometry type. [source.merge.styled,esql] ---- -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports-result] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports-result] |=== Supported types: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index ee43efa69447b..468329e41fe38 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -294,7 +294,7 @@ required_feature: esql.mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) ; warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -325,7 +325,7 @@ FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary | MV_EXPAND city_boundary | EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID(location) BY airport_in_city +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city | SORT count ASC ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index d0e18426f03ab..1a154bc6a61fa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -64,7 +64,7 @@ double pi() "double sinh(angle:double|integer|long|unsigned_long)" "keyword split(string:keyword|text, delim:keyword|text)" "double sqrt(number:double|integer|long|unsigned_long)" -"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"geo_point|cartesian_point st_centroid_agg(field:geo_point|cartesian_point)" "boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" @@ -174,7 +174,7 @@ sin |angle |"double|integer|long|unsigne sinh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. split |[string, delim] |["keyword|text", "keyword|text"] |[, ] sqrt |number |"double|integer|long|unsigned_long" |[""] -st_centroid |field |"geo_point|cartesian_point" |[""] +st_centroid_ag|field |"geo_point|cartesian_point" |[""] st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] @@ -285,7 +285,7 @@ sin |Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric funct sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. -st_centroid |The centroid of a spatial field. +st_centroid_ag|The centroid of a spatial field. st_contains |Returns whether the first geometry contains the second geometry. st_disjoint |Returns whether the two geometries or geometry columns are disjoint. st_intersects |Returns whether the two geometries or geometry columns intersect. 
@@ -397,7 +397,7 @@ sin |double sinh |double |false |false |false split |keyword |[false, false] |false |false sqrt |double |false |false |false -st_centroid |"geo_point|cartesian_point" |false |false |true +st_centroid_ag|"geo_point|cartesian_point" |false |false |true st_contains |boolean |[false, false] |false |false st_disjoint |boolean |[false, false] |false |false st_intersects |boolean |[false, false] |false |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 843b2674967fe..26fcca423d28d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -38,46 +38,46 @@ wkt:keyword ; centroidFromStringNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg row wkt = "POINT(42.97109629958868 14.7552534006536)" -| STATS c = ST_CENTROID(TO_GEOPOINT(wkt)); +| STATS c = ST_CENTROID_AGG(TO_GEOPOINT(wkt)); c:geo_point POINT(42.97109629958868 14.7552534006536) ; centroidFromString1 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(42.97109629958868 14.7552534006536) ; centroidFromString2 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(59.390193899162114 18.741501288022846) ; centroidFromString3 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:geo_point POINT(39.58327988510707 20.619513023697994) @@ -89,7 +89,7 @@ required_feature: esql.st_x_y ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) -| STATS c = ST_CENTROID(pt) +| STATS c = ST_CENTROID_AGG(pt) | EVAL x = ST_X(c), y = ST_Y(c); c:geo_point | x:double | y:double @@ -149,25 +149,25 @@ c:long | x:double | y:double # Tests for ST_CENTROID on GEO_POINT type centroidFromAirports -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg -// tag::st_centroid-airports[] +// tag::st_centroid_agg-airports[] FROM airports -| STATS centroid=ST_CENTROID(location) -// end::st_centroid-airports[] +| STATS centroid=ST_CENTROID_AGG(location) +// end::st_centroid_agg-airports[] ; -// tag::st_centroid-airports-result[] +// tag::st_centroid_agg-airports-result[] centroid:geo_point POINT(-0.030548143003023033 24.37553649504829) -// end::st_centroid-airports-result[] +// end::st_centroid_agg-airports-result[] ; centroidFromAirportsNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(TO_GEOPOINT(location)) +| STATS centroid=ST_CENTROID_AGG(TO_GEOPOINT(location)) ; centroid:geo_point @@ -175,10 +175,10 @@ POINT (-0.03054810272375508 
24.37553651570554) ; centroidFromAirportsCount -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -186,10 +186,10 @@ POINT(-0.030548143003023033 24.37553649504829) | 891 ; centroidFromAirportsCountGrouped -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank | SORT scalerank DESC ; @@ -205,11 +205,11 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -217,11 +217,11 @@ POINT(83.27726172452623 28.99289782286029) | 33 ; centroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank -| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) ; centroid:geo_point | count:long @@ -229,10 +229,10 @@ POINT (7.572387259169772 26.836561792945492) | 891 ; centroidFromAirportsCountCityLocations -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() ; centroid:geo_point | count:long @@ -240,10 +240,10 @@ POINT (1.3965610809060276 24.127649406297987) | 891 ; centroidFromAirportsCountGroupedCountry -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country | SORT count DESC, country ASC | WHERE count >= 10 ; @@ -269,11 +269,11 @@ POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; centroidFromAirportsFilteredCountry -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE country == "United States" -| STATS centroid=ST_CENTROID(city_location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() ; centroid:geo_point | count:long @@ -281,11 +281,11 @@ POINT (-97.3333946136801 38.07953176370194) | 129 ; centroidFromAirportsCountGroupedCountryCentroid -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country -| STATS centroid=ST_CENTROID(centroid), count=SUM(count) +| STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country +| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count) ; centroid:geo_point | count:long @@ -293,10 +293,10 @@ POINT (17.55538044598613 18.185558743854063) | 891 ; centroidFromAirportsCountryCount -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports -| STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() +| STATS airports=ST_CENTROID_AGG(location), 
cities=ST_CENTROID_AGG(city_location), count=COUNT() ; airports:geo_point | cities:geo_point | count:long @@ -304,13 +304,13 @@ POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.12 ; centroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12 -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -318,11 +318,11 @@ POINT(78.73736493755132 26.761841227998957) | 12 ; centroidFromAirportsAfterMvExpand -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -330,11 +330,11 @@ POINT(2.121611400672094 24.559172889205755) | 933 ; centroidFromAirportsGroupedAfterMvExpand -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank | SORT scalerank DESC ; @@ -350,12 +350,12 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsGroupedAfterMvExpandFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank ; centroid:geo_point | count:long | scalerank:i @@ -363,12 +363,12 @@ POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; centroidFromAirportsAfterMvExpandFiltered -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE scalerank == 9 | MV_EXPAND type -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -376,11 +376,11 @@ POINT(83.16847535921261 28.79002037679311) | 40 ; centroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports | WHERE country == "United Kingdom" -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -392,7 +392,7 @@ required_feature: esql.st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -404,7 +404,7 @@ required_feature: esql.st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | 
count:long @@ -416,7 +416,7 @@ required_feature: esql.st_contains_within FROM airports | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -428,7 +428,7 @@ required_feature: esql.st_intersects FROM airports | WHERE country == "United Kingdom" -| STATS centroid = ST_CENTROID(location), count=COUNT() +| STATS centroid = ST_CENTROID_AGG(location), count=COUNT() | EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL centroid_in_iceland = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) | EVAL centroid_within_uk = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -450,7 +450,7 @@ FROM airports | EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) | EVAL within_uk = ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL within_iceland = ST_WITHIN(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland +| STATS centroid = ST_CENTROID_AGG(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland | SORT count ASC ; @@ -465,7 +465,7 @@ required_feature: esql.st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -477,7 +477,7 @@ required_feature: esql.st_intersects FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" -| STATS centroid=ST_CENTROID(location), count=COUNT() +| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() ; centroid:geo_point | count:long @@ -598,7 +598,7 @@ required_feature: esql.st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), 
count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -683,7 +683,7 @@ required_feature: esql.st_disjoint FROM airports_mp | WHERE ST_DISJOINT(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -776,7 +776,7 @@ required_feature: esql.st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -863,7 +863,7 @@ required_feature: esql.st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) -| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +| STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT() ; location:geo_point | city_location:geo_point | count:long @@ -963,46 +963,46 @@ wkt:keyword |pt:cartesian_point ; centroidCartesianFromStringNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg row wkt = "POINT(4297.10986328125 -1475.530029296875)" -| STATS c = ST_CENTROID(TO_CARTESIANPOINT(wkt)); +| STATS c = ST_CENTROID_AGG(TO_CARTESIANPOINT(wkt)); c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString1 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString2 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(5939.02001953125 398.6199951171875) ; centroidFromCartesianString3 -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) -| STATS c = ST_CENTROID(pt); +| STATS c = ST_CENTROID_AGG(pt); c:cartesian_point POINT(3949.163965353159 1078.2645465797348) @@ -1039,30 +1039,30 @@ ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l # Tests for ST_CENTROID on CARTESIAN_POINT type cartesianCentroidFromAirports -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(location); +| STATS centroid=ST_CENTROID_AGG(location); centroid:cartesian_point POINT(-266681.67563861894 3053301.5120195406) ; cartesianCentroidFromAirportsNested -required_feature: esql.st_centroid +required_feature: esql.st_centroid_agg FROM airports_web -| STATS centroid=ST_CENTROID(TO_CARTESIANPOINT(location)); +| STATS centroid=ST_CENTROID_AGG(TO_CARTESIANPOINT(location)); centroid:cartesian_point POINT (-266681.66530554957 3053301.506061676) ; cartesianCentroidFromAirportsCount 
-required_feature: esql.st_centroid
+required_feature: esql.st_centroid_agg

FROM airports_web
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1070,10 +1070,10 @@ POINT(-266681.67563861894 3053301.5120195406) | 849
;

cartesianCentroidFromAirportsCountGrouped
-required_feature: esql.st_centroid
+required_feature: esql.st_centroid_agg

FROM airports_web
-| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank
| SORT scalerank DESC
;

@@ -1089,11 +1089,11 @@ POINT(140136.12878224207 3081220.7881944445) | 63 | 2
;

cartesianCentroidFromAirportsFiltered
-required_feature: esql.st_centroid
+required_feature: esql.st_centroid_agg

FROM airports_web
| WHERE scalerank == 9
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1101,13 +1101,13 @@ POINT(9289013.153846154 3615537.0533353365) | 26
;

cartesianCentroidFromAirportsFilteredAndSorted
-required_feature: esql.st_centroid
+required_feature: esql.st_centroid_agg

FROM airports_web
| WHERE scalerank == 9
| SORT abbrev
| WHERE length(name) > 12
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1115,11 +1115,11 @@ POINT(9003597.4375 3429344.0078125) | 8
;

cartesianCentroidFromAirportsCountGroupedCentroid
-required_feature: esql.st_centroid
+required_feature: esql.st_centroid_agg

FROM airports_web
-| STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank
-| STATS centroid=ST_CENTROID(centroid), count=SUM(count)
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank
+| STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count)
;

centroid:cartesian_point | count:long
@@ -1134,7 +1134,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1233,7 +1233,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1245,7 +1245,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1402,7 +1402,7 @@ required_feature: esql.st_contains_within

FROM airports_web
| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location)
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1439,7 +1439,7 @@ required_feature: esql.st_contains_within

FROM airports_web
| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1528,7 +1528,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1540,7 +1540,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1555,7 +1555,7 @@ required_feature: esql.st_contains_within

FROM airports_web
| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1654,7 +1654,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
@@ -1666,7 +1666,7 @@ required_feature: esql.st_intersects

FROM airports_web
| WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)"))
-| STATS centroid=ST_CENTROID(location), count=COUNT()
+| STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
;

centroid:cartesian_point | count:long
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
index 62688d753aeef..037b76801ca75 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
@@ -189,7 +189,7 @@ private FunctionDefinition[][] functions() {
 def(Now.class, Now::new, "now") },
 // spatial
 new FunctionDefinition[] {
-    def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"),
+    def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid_agg"),
 def(SpatialContains.class, SpatialContains::new, "st_contains"),
 def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"),
 def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"),
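
The `EsqlFunctionRegistry` hunk above is the whole user-facing rename: the `SpatialCentroid` implementation is untouched, only the name string it is registered under changes. A minimal sketch of the lookup behavior this implies, with hypothetical class and method names rather than the real registry API:

[source,java]
----
import java.util.Map;
import java.util.function.Supplier;

// Illustrative sketch only: the real EsqlFunctionRegistry builds
// FunctionDefinition arrays, but the effect of the rename is the same,
// the old name no longer resolves while the new one does.
public class RegistryRenameSketch {
    interface SpatialAggregation {}

    static class SpatialCentroid implements SpatialAggregation {}

    // The implementation class stays the same; only the registered key moved.
    private static final Map<String, Supplier<SpatialAggregation>> FUNCTIONS =
        Map.of("st_centroid_agg", SpatialCentroid::new); // was "st_centroid"

    static SpatialAggregation resolve(String name) {
        Supplier<SpatialAggregation> ctor = FUNCTIONS.get(name);
        if (ctor == null) {
            throw new IllegalArgumentException("unknown function [" + name + "]");
        }
        return ctor.get();
    }

    public static void main(String[] args) {
        resolve("st_centroid_agg");   // resolves to SpatialCentroid
        // resolve("st_centroid");    // would now throw: the old name is gone
    }
}
----

Any query still spelling `ST_CENTROID` fails name resolution after this change, which is why every csv-spec query above changes in the same commit.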
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
index 31c967fc3eee8..192c011c4494b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java
@@ -68,7 +68,7 @@ public class EsqlFeatures implements FeatureSpecification {
 /**
  * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269.
  */
-    private static final NodeFeature ST_CENTROID = new NodeFeature("esql.st_centroid");
+    private static final NodeFeature ST_CENTROID_AGG = new NodeFeature("esql.st_centroid_agg");

 /**
  * Support for spatial aggregation {@code ST_INTERSECTS}. Done in #104907.
@@ -111,7 +111,7 @@ public Set<NodeFeature> getFeatures() {
 FROM_OPTIONS,
 SPATIAL_POINTS_FROM_SOURCE,
 SPATIAL_SHAPES,
-    ST_CENTROID,
+    ST_CENTROID_AGG,
 ST_INTERSECTS,
 ST_CONTAINS_WITHIN,
 ST_DISJOINT
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
index 7de3308fcab16..32e46ee544d07 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
@@ -3077,7 +3077,7 @@ public void testIsNotNullConstraintForAliasedExpressions() {
 public void testSpatialTypesAndStatsUseDocValues() {
 var plan = planAirports("""
 from test
-    | stats centroid = st_centroid(location)
+    | stats centroid = st_centroid_agg(location)
 """);

 var limit = as(plan, Limit.class);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
index fb2362851e43c..f71161d64e130 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
@@ -2286,7 +2286,7 @@ public void testPartialAggFoldingOutputForSyntheticAgg() {
 public void testSpatialTypesAndStatsUseDocValues() {
 var plan = this.physicalPlan("""
 from airports
-    | stats centroid = st_centroid(location)
+    | stats centroid = st_centroid_agg(location)
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2343,7 +2343,7 @@ public void testSpatialTypesAndStatsUseDocValues() {
 public void testSpatialTypesAndStatsUseDocValuesNested() {
 var plan = this.physicalPlan("""
 from airports
-    | stats centroid = st_centroid(to_geopoint(location))
+    | stats centroid = st_centroid_agg(to_geopoint(location))
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2404,7 +2404,7 @@ public void testSpatialTypesAndStatsUseDocValuesNested() {
 public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() {
 var plan = this.physicalPlan("""
 row wkt = "POINT(42.97109629958868 14.7552534006536)"
-    | stats centroid = st_centroid(to_geopoint(wkt))
+    | stats centroid = st_centroid_agg(to_geopoint(wkt))
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2458,7 +2458,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() {
 public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() {
 var plan = this.physicalPlan("""
 from airports
-    | stats centroid = st_centroid(location), count = COUNT()
+    | stats centroid = st_centroid_agg(location), count = COUNT()
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2524,7 +2524,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() {
 public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() {
 var plan = this.physicalPlan("""
 FROM airports
-    | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT()
+    | STATS airports=ST_CENTROID_AGG(location), cities=ST_CENTROID_AGG(city_location), count=COUNT()
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2590,7 +2590,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() {
 var plan = this.physicalPlan("""
 FROM airports
 | WHERE scalerank == 9
-    | STATS centroid=ST_CENTROID(location), count=COUNT()
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2657,7 +2657,7 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() {
 public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() {
 var plan = this.physicalPlan("""
 FROM airports
-    | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2727,8 +2727,8 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() {
 public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregated() {
 var plan = this.physicalPlan("""
 FROM airports
-    | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank
-    | STATS centroid=ST_CENTROID(centroid), count=SUM(count)
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank
+    | STATS centroid=ST_CENTROID_AGG(centroid), count=SUM(count)
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -2821,7 +2821,7 @@ public void testEnrichBeforeSpatialAggregationSupportsDocValues() {
 var plan = physicalPlan("""
 from airports
 | enrich city_boundaries ON city_location WITH airport, region, city_boundary
-    | stats centroid = st_centroid(city_location)
+    | stats centroid = st_centroid_agg(city_location)
 """, airports);

 var limit = as(plan, LimitExec.class);
@@ -3049,7 +3049,7 @@ public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid()
 new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true),
 new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) };
 for (TestSpatialRelation test : tests) {
-    var centroidExpr = "centroid=ST_CENTROID(location), count=COUNT()";
+    var centroidExpr = "centroid=ST_CENTROID_AGG(location), count=COUNT()";
 var plan = this.physicalPlan(
 "FROM " + test.index.index.name() + " | WHERE " + test.predicate() + " | STATS " + centroidExpr,
 test.index
@@ -3152,11 +3152,11 @@ public void testPushSpatialIntersectsStringToSourceAndUseDocValuesForCentroid()
 for (String query : new String[] { """
 FROM airports
 | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))
-    | STATS centroid=ST_CENTROID(location), count=COUNT()
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
 """, """
 FROM airports
 | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location)
-    | STATS centroid=ST_CENTROID(location), count=COUNT()
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
 """ }) {
 var plan = this.physicalPlan(query, airports);
@@ -3253,13 +3253,13 @@ public void testPushSpatialIntersectsStringToSourceCompoundPredicateAndUseDocVal
 | WHERE scalerank == 9
 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))
 AND type == "mid"
-    | STATS centroid=ST_CENTROID(location), count=COUNT()
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
 """, """
 FROM airports
 | WHERE scalerank == 9
 AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location)
 AND type == "mid"
-    | STATS centroid=ST_CENTROID(location), count=COUNT()
+    | STATS centroid=ST_CENTROID_AGG(location), count=COUNT()
 """ }) {
 var plan = this.physicalPlan(query, airports);
@@ -3340,7 +3340,7 @@ public void testIntersectsOnTwoPointFieldAndBothCentroidUsesDocValues() {
 String query = """
 FROM airports
 | WHERE ST_INTERSECTS(location, city_location)
-    | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT()
+    | STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT()
 """;
 var plan = this.physicalPlan(query, airports);
@@ -3383,11 +3383,11 @@ public void testIntersectsOnTwoPointFieldAndOneCentroidUsesDocValues() {
 for (String query : new String[] { """
 FROM airports
 | WHERE ST_INTERSECTS(location, city_location)
-    | STATS location=ST_CENTROID(location), count=COUNT()
+    | STATS location=ST_CENTROID_AGG(location), count=COUNT()
 """, """
 FROM airports
 | WHERE ST_INTERSECTS(location, city_location)
-    | STATS city_location=ST_CENTROID(city_location), count=COUNT()
+    | STATS city_location=ST_CENTROID_AGG(city_location), count=COUNT()
 """ }) {
 var plan = this.physicalPlan(query, airports);
@@ -3430,7 +3430,7 @@ public void testTwoIntersectsWithTwoCentroidsUsesDocValues() {
 String query = """
 FROM airports
 | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))
 AND ST_INTERSECTS(city_location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"))
-    | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT()
+    | STATS location=ST_CENTROID_AGG(location), city_location=ST_CENTROID_AGG(city_location), count=COUNT()
 """;
 var plan = this.physicalPlan(query, airports);
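
Taken together, this patch keeps three places in sync: the registered function name in `EsqlFunctionRegistry`, the `NodeFeature` id in `EsqlFeatures`, and the `required_feature: esql.st_centroid_agg` headers in the csv-spec tests, which skip a test when the cluster does not advertise that feature. A minimal sketch of the feature-publication side, mirroring the `EsqlFeatures` hunk above (the types here are stand-ins for `org.elasticsearch.features.NodeFeature` and `FeatureSpecification`, and the class name is hypothetical):

[source,java]
----
import java.util.Set;

// Stand-in for org.elasticsearch.features.NodeFeature: a feature is just an id.
record NodeFeature(String id) {}

// Stand-in for org.elasticsearch.features.FeatureSpecification.
interface FeatureSpecification {
    Set<NodeFeature> getFeatures();
}

// Sketch of how a node advertises the renamed capability. The id string must
// match the `required_feature:` header used by the csv-spec tests above.
class SpatialFeaturesSketch implements FeatureSpecification {
    private static final NodeFeature ST_CENTROID_AGG = new NodeFeature("esql.st_centroid_agg");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(ST_CENTROID_AGG);
    }
}
----

Renaming the feature id together with the function means older nodes that only advertise `esql.st_centroid` never receive queries using the new name in mixed-version clusters.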
From afb492272a06431b3a0c7f8785fe7d10e114ca3a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?=
Date: Wed, 10 Apr 2024 17:57:18 +0200
Subject: [PATCH 141/173] [DOCS] Adds HuggingFace example to inference API tutorial (#107298)

---
 .../inference/put-inference.asciidoc          | 15 +++-
 .../semantic-search-inference.asciidoc        | 12 +--
 .../infer-api-ingest-pipeline-widget.asciidoc | 17 +++++
 .../infer-api-ingest-pipeline.asciidoc        | 25 +++++++
 .../infer-api-mapping-widget.asciidoc         | 16 ++++
 .../inference-api/infer-api-mapping.asciidoc  | 31 ++++++++
 .../infer-api-reindex-widget.asciidoc         | 17 +++++
 .../inference-api/infer-api-reindex.asciidoc  | 23 ++++++
 .../infer-api-requirements-widget.asciidoc    | 17 +++++
 .../infer-api-requirements.asciidoc           |  6 ++
 .../infer-api-search-widget.asciidoc          | 17 +++++
 .../inference-api/infer-api-search.asciidoc   | 73 +++++++++++++++++++
 .../infer-api-task-widget.asciidoc            | 17 +++++
 .../inference-api/infer-api-task.asciidoc     | 30 ++++++++
 14 files changed, 310 insertions(+), 6 deletions(-)

diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc
index 7d0ede82f70fa..9f7f6384a7389 100644
--- a/docs/reference/inference/put-inference.asciidoc
+++ b/docs/reference/inference/put-inference.asciidoc
@@ -346,7 +346,7 @@
 ===== Hugging Face service
 
 The following example shows how to create an {infer} endpoint called
-`hugging-face_embeddings` to perform a `text_embedding` task type.
+`hugging-face-embeddings` to perform a `text_embedding` task type.
 
 [source,console]
 ------------------------------------------------------------
@@ -371,6 +371,19 @@ endpoint URL. Select the model you want to use on the new endpoint creation page
 task under the Advanced configuration section. Create the endpoint.
 Copy the URL after the endpoint initialization has been finished.
 
+[discrete]
+[[inference-example-hugging-face-supported-models]]
+The list of supported models for the Hugging Face service:
+
+* https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2[all-MiniLM-L6-v2]
+* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[all-MiniLM-L12-v2]
+* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[all-mpnet-base-v2]
+* https://huggingface.co/intfloat/e5-base-v2[e5-base-v2]
+* https://huggingface.co/intfloat/e5-small-v2[e5-small-v2]
+* https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base]
+* https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small]
+
+
 [discrete]
 [[inference-example-eland]]
 ===== Models uploaded by Eland via the elasticsearch service
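
The endpoint described in the documentation above can also be created from Java with the low-level REST client. This is a sketch only, assuming a local unsecured cluster; the access token and HuggingFace endpoint URL are placeholders you must replace, and none of this code is part of the patch:

[source,java]
----
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Sketch: create the `hugging-face-embeddings` text_embedding endpoint that
// the docs above describe, via PUT _inference/<task_type>/<inference_id>.
public class CreateHuggingFaceEndpoint {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/_inference/text_embedding/hugging-face-embeddings");
            // Placeholder values: supply your own token and the URL copied
            // from the HuggingFace endpoint page after initialization.
            request.setJsonEntity("""
                {
                  "service": "hugging_face",
                  "service_settings": {
                    "api_key": "<hugging_face_access_token>",
                    "url": "<url_endpoint>"
                  }
                }
                """);
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
----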
diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc
index b5619f8dda7b9..53abf0f0458af 100644
--- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc
+++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc
@@ -6,9 +6,11 @@
 
 The instructions in this tutorial shows you how to use the {infer} API with various
 services to perform semantic search on your data. The following examples
-use Cohere's `embed-english-v3.0` model and OpenAI's `text-embedding-ada-002`
-second generation embedding model. You can use any Cohere and OpenAI models,
-they are all supported by the {infer} API.
+use Cohere's `embed-english-v3.0` model, the `all-mpnet-base-v2` model from
+HuggingFace, and OpenAI's `text-embedding-ada-002` second generation embedding
+model. You can use any Cohere and OpenAI models, they are all supported by the
+{infer} API. For a list of supported models available on HuggingFace, refer to
+<<inference-example-hugging-face-supported-models>>.
 
 Click the name of the service you want to use on any of the widgets below to
 review the corresponding instructions.
@@ -91,7 +93,7 @@ GET _tasks/<task_id>
 // TEST[skip:TBD]
 
 You can also cancel the reindexing process if you don't want to wait until the
-reindexing process is fully complete which might take hours:
+reindexing process is fully complete which might take hours for large data sets:
 
 [source,console]
 ----
 POST _tasks/<task_id>/_cancel
@@ -104,7 +106,7 @@
 [[infer-semantic-search]]
 ==== Semantic search
 
-After the dataset has been enriched with the embeddings, you can query the data
+After the data set has been enriched with the embeddings, you can query the data
 using {ref}/knn-search.html#knn-semantic-search[semantic search]. Pass a
 `query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and
 provide the query text and the model you have used to create the embeddings.
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc
index 44d2f60966caa..069dcb61f81b0 100644
--- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc
+++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc
@@ -7,6 +7,12 @@
          id="infer-api-ingest-cohere">
    Cohere
  
+
+
+
+
+