diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml new file mode 100644 index 0000000000000..762bcca5e8c52 --- /dev/null +++ b/docs/changelog/115091.yaml @@ -0,0 +1,7 @@ +pr: 115091 +summary: Added stricter range type checks and runtime warnings for ENRICH +area: ES|QL +type: bug +issues: + - 107357 + - 116799 diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc index c48118d1c367a..ad34e29f1a55b 100644 --- a/docs/reference/esql/esql-enrich-data.asciidoc +++ b/docs/reference/esql/esql-enrich-data.asciidoc @@ -138,8 +138,33 @@ include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=upda include::../ingest/enrich.asciidoc[tag=update-enrich-policy] -==== Limitations +==== Enrich Policy Types and Limitations +The {esql} `ENRICH` command supports all three enrich policy types: + +`geo_match`:: +Matches enrich data to incoming documents based on a <>. +For an example, see <>. + +`match`:: +Matches enrich data to incoming documents based on a <>. +For an example, see <>. + +`range`:: +Matches a number, date, or IP address in incoming documents to a range in the +enrich index based on a <>. For an example, +see <>. + // tag::limitations[] -The {esql} `ENRICH` command only supports enrich policies of type `match`. -Furthermore, `ENRICH` only supports enriching on a column of type `keyword`. +While all three enrich policy types are supported, there are some limitations to be aware of: + +* The `geo_match` enrich policy type only supports the `intersects` spatial relation. +* The `match_field` used in the `ENRICH` command must be of the correct type. +For example, if the enrich policy is of type `geo_match`, the `match_field` in the `ENRICH` +command must be of type `geo_point` or `geo_shape`. +Likewise, a `range` enrich policy requires a `match_field` of type `integer`, `long`, `date`, or `ip`, +depending on the type of the range field in the original enrich index. +* For `range` policies this type constraint is relaxed when the `match_field` is of type `KEYWORD`. +In this case the field values are parsed during query execution, row by row. +If any value fails to parse, the output values for that row are set to `null`, +a warning is produced, and the query continues to execute. // end::limitations[] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f7d864db7e17c..945e9e60ea6fd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -202,6 +202,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); + public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); /* * STOP! READ THIS FIRST!
No, really, diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index e9978ab1aca6a..9bfaa14973159 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -208,5 +208,6 @@ tasks.named("yamlRestTestV7CompatTransform").configure({ task -> task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.") task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.") task.skipTest("privileges/11_builtin/Test get builtin privileges" ,"unnecessary to test compatibility") + task.skipTest("esql/61_enrich_ip/Invalid IP strings", "We switched from exceptions to null+warnings for ENRICH runtime errors") }) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 347e6b43099fc..e980b1509813e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -591,6 +591,13 @@ public DataType noText() { return isString(this) ? KEYWORD : this; } + public boolean isDate() { + return switch (this) { + case DATETIME, DATE_NANOS -> true; + default -> false; + }; + } + /** * Named parameters with default values. It's just easier to do this with * a builder in java.... diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java index 6631a41d1d95e..44537c198840d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java @@ -21,6 +21,7 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -37,17 +38,25 @@ public final class EnrichQuerySourceOperator extends SourceOperator { private int queryPosition = -1; private final IndexReader indexReader; private final IndexSearcher searcher; + private final Warnings warnings; private final int maxPageSize; // using smaller pages enables quick cancellation and reduces sorting costs public static final int DEFAULT_MAX_PAGE_SIZE = 256; - public EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) { + public EnrichQuerySourceOperator( + BlockFactory blockFactory, + int maxPageSize, + QueryList queryList, + IndexReader indexReader, + Warnings warnings + ) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.queryList = queryList; this.indexReader = indexReader; this.searcher = new IndexSearcher(indexReader); + this.warnings = warnings; } @Override @@ -72,12 +81,18 @@ public Page getOutput() { } int totalMatches = 0; do { - Query query = nextQuery(); - if (query == null) { - assert isFinished(); - break; + Query query; + try { + query = nextQuery(); + if (query == null) { + assert isFinished(); + break; + } + query = searcher.rewrite(new ConstantScoreQuery(query)); + } catch (Exception e) { + warnings.registerException(e); + continue; } - 
query = searcher.rewrite(new ConstantScoreQuery(query)); final var weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f); if (weight == null) { continue; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index fa0c6f30ca4e6..a19335db3bc57 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -32,6 +32,8 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -120,7 +122,8 @@ public void testQueries() throws Exception { // 3 -> [] -> [] // 4 -> [a1] -> [3] // 5 -> [] -> [] - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader); + var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader, warnings); Page p0 = queryOperator.getOutput(); assertNotNull(p0); assertThat(p0.getPositionCount(), equalTo(6)); @@ -187,7 +190,8 @@ public void testRandomMatchQueries() throws Exception { MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); var queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); int maxPageSize = between(1, 256); - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader); + var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader, warnings); Map> actualPositions = new HashMap<>(); while (queryOperator.isFinished() == false) { Page page = queryOperator.getOutput(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index cff9604053903..5c0c13b48df3b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -183,7 +183,8 @@ public void testLookupIndex() throws IOException { DataType.KEYWORD, "lookup", "data", - List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))) + List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))), + Source.EMPTY ); DriverContext driverContext = driverContext(); try ( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 
a41e43a859e67..fbb3060891a34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -277,6 +277,11 @@ public enum Cap { */ RANGEQUERY_FOR_DATETIME, + /** + * Enforce strict type checking on ENRICH range types, and warnings for KEYWORD parsing at runtime. Done in #115091. + */ + ENRICH_STRICT_RANGE_TYPES, + /** * Fix for non-unique attribute names in ROW and logical plans. * https://github.com/elastic/elasticsearch/issues/110541 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index d2f90bd6c1e16..497efe64d9e9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -41,6 +41,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.compute.operator.lookup.EnrichQuerySourceOperator; import org.elasticsearch.compute.operator.lookup.MergePositionsOperator; import org.elasticsearch.compute.operator.lookup.QueryList; @@ -78,6 +79,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -166,6 +168,10 @@ abstract class AbstractLookupService list releasables.add(mergePositionsOperator); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); QueryList queryList = queryList(request, searchExecutionContext, inputBlock, request.inputDataType); + var warnings = Warnings.createWarnings( + DriverContext.WarningsMode.COLLECT, + request.source.source().getLineNumber(), + request.source.source().getColumnNumber(), + request.source.text() + ); var queryOperator = new EnrichQuerySourceOperator( driverContext.blockFactory(), EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, queryList, - searchExecutionContext.getIndexReader() + searchExecutionContext.getIndexReader(), + warnings ); releasables.add(queryOperator); var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, request.extractFields); @@ -450,13 +463,22 @@ abstract static class Request { final DataType inputDataType; final Page inputPage; final List extractFields; + final Source source; - Request(String sessionId, String index, DataType inputDataType, Page inputPage, List extractFields) { + Request( + String sessionId, + String index, + DataType inputDataType, + Page inputPage, + List extractFields, + Source source + ) { this.sessionId = sessionId; this.index = index; this.inputDataType = inputDataType; this.inputPage = inputPage; this.extractFields = extractFields; + this.source = source; } } @@ -470,6 +492,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans final DataType inputDataType; final Page inputPage; final List extractFields; 
+ final Source source; // TODO: Remove this workaround once we have Block RefCount final Page toRelease; final RefCounted refs = AbstractRefCounted.of(this::releasePage); @@ -480,7 +503,8 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans DataType inputDataType, Page inputPage, Page toRelease, - List extractFields + List extractFields, + Source source ) { this.sessionId = sessionId; this.shardId = shardId; @@ -488,6 +512,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans this.inputPage = inputPage; this.toRelease = toRelease; this.extractFields = extractFields; + this.source = source; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index 6e5845fae33b7..df608a04632a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -16,9 +16,11 @@ import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -35,6 +37,8 @@ public final class EnrichLookupOperator extends AsyncOperator { private final String matchType; private final String matchField; private final List enrichFields; + private final ResponseHeadersCollector responseHeadersCollector; + private final Source source; private long totalTerms = 0L; public record Factory( @@ -47,7 +51,8 @@ public record Factory( String enrichIndex, String matchType, String matchField, - List enrichFields + List enrichFields, + Source source ) implements OperatorFactory { @Override public String describe() { @@ -75,7 +80,8 @@ public Operator get(DriverContext driverContext) { enrichIndex, matchType, matchField, - enrichFields + enrichFields, + source ); } } @@ -91,7 +97,8 @@ public EnrichLookupOperator( String enrichIndex, String matchType, String matchField, - List enrichFields + List enrichFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -103,6 +110,8 @@ public EnrichLookupOperator( this.matchType = matchType; this.matchField = matchField; this.enrichFields = enrichFields; + this.source = source; + this.responseHeadersCollector = new ResponseHeadersCollector(enrichLookupService.getThreadContext()); } @Override @@ -116,9 +125,14 @@ protected void performAsync(Page inputPage, ActionListener listener) { matchType, matchField, new Page(inputBlock), - enrichFields + enrichFields, + source + ); + enrichLookupService.lookupAsync( + request, + parentTask, + ActionListener.runBefore(listener.map(inputPage::appendPage), responseHeadersCollector::collect) ); - enrichLookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } @Override @@ -140,6 +154,7 @@ public String toString() { protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates 
early (e.g., have enough result). + responseHeadersCollector.finish(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 2d85b46e33a8c..50a1ffce4841f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.RangeFieldMapper; +import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchService; @@ -27,6 +29,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -71,13 +74,15 @@ protected TransportRequest transportRequest(EnrichLookupService.Request request, request.matchField, request.inputPage, null, - request.extractFields + request.extractFields, + request.source ); } @Override protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); + validateTypes(inputDataType, fieldType); return switch (request.matchType) { case "match", "range" -> termQueryList(fieldType, context, inputBlock, inputDataType); case "geo_match" -> QueryList.geoShapeQueryList(fieldType, context, inputBlock); @@ -85,6 +90,33 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c }; } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { + if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) { + // For range policy types, the ENRICH index field type will be one of a list of supported range types, + // which need to match the input data type (eg. ip-range -> ip, date-range -> date, etc.) + if (rangeTypesCompatible(rangeType.rangeType(), inputDataType) == false) { + throw new EsqlIllegalArgumentException( + "ENRICH range and input types are incompatible: range[" + rangeType.rangeType() + "], input[" + inputDataType + "]" + ); + } + } + // For match policies, the ENRICH index field will always be KEYWORD, and input type will be converted to KEYWORD. + // For geo_match, type validation is done earlier, in the Analyzer. 
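// Editorial illustration (not part of the patch): concrete outcomes of this validation, following the
// rangeTypesCompatible() rules just below and the YAML tests added in this change:
//   integer_range policy, ENRICH ... ON <integer or long column> -> accepted
//   integer_range policy, ENRICH ... ON <double column>          -> query fails with "ENRICH range and input types are incompatible"
//   integer_range policy, ENRICH ... ON <keyword column>         -> accepted; values are parsed row by row at runtime, and
//                                                                    unparsable values yield null enrich fields plus a warning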
+ } + + private static boolean rangeTypesCompatible(RangeType rangeType, DataType inputDataType) { + if (inputDataType.noText() == DataType.KEYWORD) { + // We allow runtime parsing of string types to numeric types + return true; + } + return switch (rangeType) { + case INTEGER, LONG -> inputDataType.isWholeNumber(); + case IP -> inputDataType == DataType.IP; + case DATE -> inputDataType.isDate(); + default -> rangeType.isNumeric() == inputDataType.isNumeric(); + }; + } + public static class Request extends AbstractLookupService.Request { private final String matchType; private final String matchField; @@ -96,9 +128,10 @@ public static class Request extends AbstractLookupService.Request { String matchType, String matchField, Page inputPage, - List extractFields + List extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -116,9 +149,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR String matchField, Page inputPage, Page toRelease, - List extractFields + List extractFields, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -138,6 +172,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro } PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -146,7 +184,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro matchField, inputPage, inputPage, - extractFields + extractFields, + source ); result.setParentTask(parentTaskId); return result; @@ -165,6 +204,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeWriteable(inputPage); PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java index 836b400c54f8c..f09f7d0e23e7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -36,7 +37,8 @@ public record Factory( DataType inputDataType, String lookupIndex, String matchField, - List loadFields + List loadFields, + Source source ) implements OperatorFactory { @Override public 
String describe() { @@ -63,7 +65,8 @@ public Operator get(DriverContext driverContext) { inputDataType, lookupIndex, matchField, - loadFields + loadFields, + source ); } } @@ -76,6 +79,7 @@ public Operator get(DriverContext driverContext) { private final String lookupIndex; private final String matchField; private final List loadFields; + private final Source source; private long totalTerms = 0L; public LookupFromIndexOperator( @@ -88,7 +92,8 @@ public LookupFromIndexOperator( DataType inputDataType, String lookupIndex, String matchField, - List loadFields + List loadFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -99,6 +104,7 @@ public LookupFromIndexOperator( this.lookupIndex = lookupIndex; this.matchField = matchField; this.loadFields = loadFields; + this.source = source; } @Override @@ -111,7 +117,8 @@ protected void performAsync(Page inputPage, ActionListener listener) { inputDataType, matchField, new Page(inputBlock), - loadFields + loadFields, + source ); lookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index ef204e88c234f..849e8e890e248 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.enrich; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -68,7 +70,8 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque request.inputPage, null, request.extractFields, - request.matchField + request.matchField, + request.source ); } @@ -87,9 +90,10 @@ public static class Request extends AbstractLookupService.Request { DataType inputDataType, String matchField, Page inputPage, - List extractFields + List extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchField = matchField; } } @@ -104,9 +108,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR Page inputPage, Page toRelease, List extractFields, - String matchField + String matchField, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchField = matchField; } @@ -122,6 +127,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = 
planIn.readNamedWriteableCollectionAsList(NamedExpression.class); String matchField = in.readString(); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -129,7 +138,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro inputPage, inputPage, extractFields, - matchField + matchField, + source ); result.setParentTask(parentTaskId); return result; @@ -145,6 +155,9 @@ public void writeTo(StreamOutput out) throws IOException { PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); out.writeString(matchField); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 1e1cc3b86a9d5..47e5b9acfbf9d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.NameId; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.session.Configuration; @@ -160,7 +161,7 @@ public Block[] readCachedBlockArray() throws IOException { @Override public String sourceText() { - return configuration.query(); + return configuration == null ? 
Source.EMPTY.text() : configuration.query(); } static void throwOnNullOptionalRead(Class type) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index eacc7f97a6f1c..ab0730f5289ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -498,7 +498,8 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon enrichIndex, enrich.matchType(), enrich.policyMatchField(), - enrich.enrichFields() + enrich.enrichFields(), + enrich.source() ), layout ); @@ -609,7 +610,8 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan matchFields.get(0).type(), localSourceExec.index().name(), join.matchFields().get(0).name(), - join.addedFields().stream().map(f -> (NamedExpression) f).toList() + join.addedFields().stream().map(f -> (NamedExpression) f).toList(), + join.source() ), layout ); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 076bf116292d0..3f2bcb4ed7f4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -162,14 +162,38 @@ teardown: --- "Invalid IP strings": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "IP range ENRICH support was added in 8.14.0" + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Runtime range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] - do: - catch: /'invalid_[\d\.]+' is not an IP string literal/ + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:68|-1:-1): evaluation of \\[(ENRICH networks-policy ON ip_str|)\\] failed, treating result as null. Only first 20 failures recorded." + - "Line (1:68|-1:-1): java.lang.IllegalArgumentException: 'invalid_' is not an IP string literal." 
+ esql.query: body: - query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + query: 'FROM events | eval ip_str = mv_concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + + - match: { columns.0.name: "ip" } + - match: { columns.0.type: "ip" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "department" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "message" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "10.100.0.21", null, null, "network connected" ] } + - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], null, null, "sending messages" ] } + - match: { values.2: [ "10.101.0.107" , null, null, "network disconnected" ] } + - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } --- "IP": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml new file mode 100644 index 0000000000000..4d84a10507504 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml @@ -0,0 +1,199 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: ages + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + age_range: + type: "integer_range" + description: + type: "keyword" + + - do: + bulk: + index: ages + refresh: true + body: + - { "index": { } } + - { "age_range": { "gte": 0, "lt": 2 }, "description": "Baby" } + - { "index": { } } + - { "age_range": { "gte": 2, "lt": 4 }, "description": "Toddler" } + - { "index": { } } + - { "age_range": { "gte": 3, "lt": 5 }, "description": "Preschooler" } + - { "index": { } } + - { "age_range": { "gte": 5, "lt": 12 }, "description": "Child" } + - { "index": { } } + - { "age_range": { "gte": 13, "lt": 20 }, "description": "Adolescent" } + - { "index": { } } + - { "age_range": { "gte": 20, "lt": 40 }, "description": "Young Adult" } + - { "index": { } } + - { "age_range": { "gte": 40, "lt": 60 }, "description": "Middle-aged" } + - { "index": { } } + - { "age_range": { "gte": 60, "lt": 80 }, "description": "Senior" } + - { "index": { } } + - { "age_range": { "gte": 80, "lt": 100 }, "description": "Elderly" } + - { "index": { } } + - { "age_range": { "gte": 100, "lt": 200 }, "description": "Incredible" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: ages-policy + body: + range: + indices: [ "ages" ] + match_field: "age_range" + enrich_fields: [ "description" ] + + - do: + enrich.execute_policy: + name: ages-policy + + - do: + indices.create: + index: employees + body: + mappings: + properties: + name: + type: keyword + age: + type: integer + ak: + type: keyword + salary: + type: double + + - do: + bulk: + index: employees + refresh: true + body: + - { "index": { } } + - { "name": "Joe Soap", "age": 36, "ak": "36", "salary": 55.55 } 
+ - { "index": { } } + - { "name": "Jane Doe", "age": 31, "ak": "31", "salary": 55.55 } + - { "index": { } } + - { "name": "Jane Immortal", "age": -1, "ak": "immortal", "salary": 55.55 } + - { "index": { } } + - { "name": "Magic Mike", "age": 44, "ak": "44", "salary": 55.55 } + - { "index": { } } + - { "name": "Anon Ymous", "age": 61, "ak": "61", "salary": 55.55 } + +--- +teardown: + - do: + enrich.delete_policy: + name: ages-policy + +--- +"ages": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON age | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line 1:29: evaluation of \\[ak::integer\\] failed, treating result as null. Only first 20 failures recorded." + - "Line 1:29: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number \\[immortal\\]" + esql.query: + body: + query: 'FROM employees | EVAL aki = ak::integer | ENRICH ages-policy ON aki | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON ak|)\\] failed, treating result as null. Only first 20 failures recorded." + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"immortal\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON ak | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"Invalid age as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON name|)\\] failed, treating result as null. Only first 20 failures recorded." 
+ - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Joe Soap\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Doe\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Immortal\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Magic Mike\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Anon Ymous\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON name | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 5, null ] } + +--- +"Invalid age as double": + - do: + catch: /ENRICH range and input types are incompatible. range\[INTEGER\], input\[DOUBLE\]/ + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON salary | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml new file mode 100644 index 0000000000000..ef11e5098f5c2 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml @@ -0,0 +1,222 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: decades + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + decade: + type: "integer" + description: + type: "keyword" + + - do: + bulk: + index: decades + refresh: true + body: + - { "index": { } } + - { "decade": 1900, "description": "Gay Nineties" } + - { "index": { } } + - { "decade": 1910, "description": "Teens" } + - { "index": { } } + - { "decade": 1920, "description": "Roaring Twenties" } + - { "index": { } } + - { "decade": 1930, "description": "Dirty Thirties" } + - { "index": { } } + - { "decade": 1940, "description": "War Years" } + - { "index": { } } + - { "decade": 1950, "description": "Fabulous Fifties" } + - { "index": { } } + - { "decade": 1960, "description": "Swinging Sixties" } + - { "index": { } } + - { "decade": 1970, "description": "Me Decade" } + - { "index": { } } + - { "decade": 1980, "description": "Decade of Greed" } + - { "index": { } } + - { "decade": 1990, "description": "Nineties" } + - { "index": { } } + - { "decade": 2000, "description": "Aughts" } + - { "index": { } } + - { "decade": 2010, "description": "Digital Age" } + - { "index": { } } + - { "decade": 2020, "description": "Roaring Twenties 2.0" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: decades-policy + body: + match: + indices: [ "decades" ] + match_field: "decade" + enrich_fields: [ "description" ] + + - do: + enrich.execute_policy: + name: decades-policy + + - do: + indices.create: + index: songs + body: + mappings: + properties: + title: + type: keyword + year: + type: integer + singer: + type: keyword + + - do: + bulk: + index: songs 
+ refresh: true + body: + - { "index": { } } + - { "singer": "Louis Armstrong", "title": "What a Wonderful World", "year": 1967 } + - { "index": { } } + - { "singer": "The Beatles", "title": "Hey Jude", "year": 1968 } + - { "index": { } } + - { "singer": "Elvis Presley", "title": "Jailhouse Rock", "year": 1957 } + - { "index": { } } + - { "singer": "Billie Holiday", "title": "Strange Fruit", "year": 1939 } + - { "index": { } } + - { "singer": "Frank Sinatra", "title": "Fly Me to the Moon", "year": 1964 } + - { "index": { } } + - { "singer": "Bob Dylan", "title": "Blowin' in the Wind", "year": 1963 } + - { "index": { } } + - { "singer": "Queen", "title": "Bohemian Rhapsody", "year": 1975 } + - { "index": { } } + - { "singer": "ABBA", "title": "Dancing Queen", "year": 1976 } + - { "index": { } } + - { "singer": "Michael Jackson", "title": "Thriller", "year": 1982 } + - { "index": { } } + - { "singer": "Nirvana", "title": "Smells Like Teen Spirit", "year": 1991 } + - { "index": { } } + - { "singer": "Whitney Houston", "title": "I Will Always Love You", "year": 1992 } + - { "index": { } } + - { "singer": "Aretha Franklin", "title": "Respect", "year": 1967 } + - { "index": { } } + - { "singer": "Chuck Berry", "title": "Johnny B. Goode", "year": 1958 } + - { "index": { } } + - { "singer": "Madonna", "title": "Like a Prayer", "year": 1989 } + - { "index": { } } + - { "singer": "The Rolling Stones", "title": "(I Can't Get No) Satisfaction", "year": 1965 } + - { "index": { } } + - { "singer": "Beyoncé", "title": "Single Ladies (Put a Ring on It)", "year": 2008 } + - { "index": { } } + - { "singer": "Adele", "title": "Rolling in the Deep", "year": 2010 } + - { "index": { } } + - { "singer": "Lady Gaga", "title": "Bad Romance", "year": 2009 } + - { "index": { } } + - { "singer": "Billie Eilish", "title": "Bad Guy", "year": 2019 } + - { "index": { } } + - { "singer": "Taylor Swift", "title": "Anti-Hero", "year": 2022 } + +--- +teardown: + - do: + enrich.delete_policy: + name: decades-policy + +--- +"decades": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::integer | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } + - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"decades as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::keyword | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } 
+ - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"Invalid decade as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | ENRICH decades-policy ON singer | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] } + +--- +"Invalid decade as double": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = 10.0*FLOOR(year/10) | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] }
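As a closing illustration of the behaviour these tests pin down, here is a minimal, self-contained sketch in plain Java (not Elasticsearch code; the class and variable names are invented for the example). It mimics the row-by-row semantics described in the documentation change above for a `range` policy matched on a `KEYWORD` column: each value is parsed independently, and a value that fails to parse contributes `null` enrich fields and a recorded warning instead of failing the whole query.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class KeywordRangeMatchSemanticsSketch {
    public static void main(String[] args) {
        // The `ak` keyword values from the employees test fixture above.
        List<String> keywordColumn = Arrays.asList("36", "31", "immortal", "44", "61");
        List<Integer> parsed = new ArrayList<>();
        List<String> warnings = new ArrayList<>();
        for (String value : keywordColumn) {
            try {
                parsed.add(Integer.parseInt(value)); // would then be matched against the integer_range entries
            } catch (NumberFormatException e) {
                parsed.add(null);           // the row is still emitted; its enrich fields are null
                warnings.add(e.toString()); // e.g. java.lang.NumberFormatException: For input string: "immortal"
            }
        }
        System.out.println(parsed);   // [36, 31, null, 44, 61]
        System.out.println(warnings); // one warning, mirroring the "ages as keywords" test expectation
    }
}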