void assertResponses(
+        Consumer<R> consumer,
+        RequestBuilder<Q, R>... searchRequestBuilder
+    ) {
+        List<Future<R>> futures = new ArrayList<>(searchRequestBuilder.length);
+        for (RequestBuilder<Q, R> builder : searchRequestBuilder) {
+            futures.add(builder.execute());
+        }
+        Throwable tr = null;
+        for (Future<R> f : futures) {
+            try {
+                var res = f.get();
+                try {
+                    consumer.accept(res);
+                } finally {
+                    res.decRef();
+                }
+            } catch (Throwable t) {
+                tr = ExceptionsHelper.useOrSuppress(tr, t);
+            }
+        }
+        if (tr != null) {
+            throw new AssertionError(tr);
+        }
+    }
+
     public static void assertResponse(
         RequestBuilder<Q, R> searchRequestBuilder,
         Consumer<R> consumer
diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java
index a253b6bdd2360..0fb4267745cb8 100644
--- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java
+++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java
@@ -115,24 +115,16 @@ public LongHistogram getCacheMissLoadTimes() {
      *
      * @param bytesCopied The number of bytes copied
      * @param copyTimeNanos The time taken to copy the bytes in nanoseconds
-     * @param index The index being loaded
-     * @param shardId The ID of the shard being loaded
      * @param cachePopulationReason The reason for the cache being populated
      * @param cachePopulationSource The source from which the data is being loaded
      */
     public void recordCachePopulationMetrics(
         int bytesCopied,
         long copyTimeNanos,
-        String index,
-        int shardId,
         CachePopulationReason cachePopulationReason,
         CachePopulationSource cachePopulationSource
     ) {
         Map<String, Object> metricAttributes = Map.of(
-            INDEX_ATTRIBUTE_KEY,
-            index,
-            SHARD_ID_ATTRIBUTE_KEY,
-            shardId,
             CACHE_POPULATION_REASON_ATTRIBUTE_KEY,
             cachePopulationReason.name(),
             CACHE_POPULATION_SOURCE_ATTRIBUTE_KEY,
diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/BlobCacheMetricsTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/BlobCacheMetricsTests.java
index ea9d0b7356f0e..435798ba93a8b 100644
--- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/BlobCacheMetricsTests.java
+++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/BlobCacheMetricsTests.java
@@ -30,15 +30,11 @@ public void createMetrics() {
     public void testRecordCachePopulationMetricsRecordsThroughput() {
         int mebiBytesSent = randomIntBetween(1, 4);
         int secondsTaken = randomIntBetween(1, 5);
-        String indexName = randomIdentifier();
-        int shardId = randomIntBetween(0, 10);
         BlobCacheMetrics.CachePopulationReason cachePopulationReason = randomFrom(BlobCacheMetrics.CachePopulationReason.values());
         CachePopulationSource cachePopulationSource = randomFrom(CachePopulationSource.values());
         metrics.recordCachePopulationMetrics(
             Math.toIntExact(ByteSizeValue.ofMb(mebiBytesSent).getBytes()),
             TimeUnit.SECONDS.toNanos(secondsTaken),
-            indexName,
-            shardId,
             cachePopulationReason,
             cachePopulationSource
         );
@@ -48,32 +44,28 @@ public void testRecordCachePopulationMetricsRecordsThroughput() {
         Measurement throughputMeasurement = recordingMeterRegistry.getRecorder()
             .getMeasurements(InstrumentType.DOUBLE_HISTOGRAM, "es.blob_cache.population.throughput.histogram")
             .get(0);
         assertEquals(throughputMeasurement.getDouble(), (double) mebiBytesSent / secondsTaken, 0.0);
-        assertExpectedAttributesPresent(throughputMeasurement, shardId, indexName, cachePopulationReason, cachePopulationSource);
+        assertExpectedAttributesPresent(throughputMeasurement, cachePopulationReason,
cachePopulationSource); // bytes counter Measurement totalBytesMeasurement = recordingMeterRegistry.getRecorder() .getMeasurements(InstrumentType.LONG_COUNTER, "es.blob_cache.population.bytes.total") .get(0); assertEquals(totalBytesMeasurement.getLong(), ByteSizeValue.ofMb(mebiBytesSent).getBytes()); - assertExpectedAttributesPresent(totalBytesMeasurement, shardId, indexName, cachePopulationReason, cachePopulationSource); + assertExpectedAttributesPresent(totalBytesMeasurement, cachePopulationReason, cachePopulationSource); // time counter Measurement totalTimeMeasurement = recordingMeterRegistry.getRecorder() .getMeasurements(InstrumentType.LONG_COUNTER, "es.blob_cache.population.time.total") .get(0); assertEquals(totalTimeMeasurement.getLong(), TimeUnit.SECONDS.toMillis(secondsTaken)); - assertExpectedAttributesPresent(totalTimeMeasurement, shardId, indexName, cachePopulationReason, cachePopulationSource); + assertExpectedAttributesPresent(totalTimeMeasurement, cachePopulationReason, cachePopulationSource); } private static void assertExpectedAttributesPresent( Measurement measurement, - int shardId, - String indexName, BlobCacheMetrics.CachePopulationReason cachePopulationReason, CachePopulationSource cachePopulationSource ) { - assertEquals(measurement.attributes().get(BlobCacheMetrics.SHARD_ID_ATTRIBUTE_KEY), shardId); - assertEquals(measurement.attributes().get(BlobCacheMetrics.INDEX_ATTRIBUTE_KEY), indexName); assertEquals(measurement.attributes().get(BlobCacheMetrics.CACHE_POPULATION_REASON_ATTRIBUTE_KEY), cachePopulationReason.name()); assertEquals(measurement.attributes().get(BlobCacheMetrics.CACHE_POPULATION_SOURCE_ATTRIBUTE_KEY), cachePopulationSource.name()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/RemainingTime.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/RemainingTime.java index 33a3f2424c90c..4772277ae2375 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/RemainingTime.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/time/RemainingTime.java @@ -18,8 +18,13 @@ public interface RemainingTime extends Supplier { * Create a {@link Supplier} that returns a decreasing {@link TimeValue} on each invocation, representing the amount of time until * the call times out. The timer starts when this method is called and counts down from remainingTime to 0. * currentTime should return the most up-to-date system time, for example Instant.now() or Clock.instant(). + * {@link TimeValue#MAX_VALUE} is a special case where the remaining time is always TimeValue.MAX_VALUE. 
*/ static RemainingTime from(Supplier currentTime, TimeValue remainingTime) { + if (remainingTime.equals(TimeValue.MAX_VALUE)) { + return () -> TimeValue.MAX_VALUE; + } + var timeout = currentTime.get().plus(remainingTime.duration(), remainingTime.timeUnit().toChronoUnit()); var maxRemainingTime = remainingTime.nanos(); return () -> { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/RemainingTimeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/RemainingTimeTests.java index 3a948608f6ae3..1e6bc2a51f6e9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/RemainingTimeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/time/RemainingTimeTests.java @@ -32,6 +32,11 @@ public void testRemainingTimeMaxValue() { assertThat(remainingTime.get(), Matchers.equalTo(TimeValue.ZERO)); } + public void testMaxTime() { + var remainingTime = RemainingTime.from(Instant::now, TimeValue.MAX_VALUE); + assertThat(remainingTime.get(), Matchers.equalTo(TimeValue.MAX_VALUE)); + } + // always add the first value, which is read when RemainingTime.from is called, then add the test values private Supplier times(Instant... instants) { var startTime = Stream.of(Instant.now()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 9708a3ea0db85..347e6b43099fc 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -29,7 +29,6 @@ import java.util.function.Function; import static java.util.stream.Collectors.toMap; -import static java.util.stream.Collectors.toUnmodifiableMap; import static org.elasticsearch.xpack.esql.core.util.PlanStreamInput.readCachedStringWithVersionCheck; import static org.elasticsearch.xpack.esql.core.util.PlanStreamOutput.writeCachedStringWithVersionCheck; @@ -276,7 +275,7 @@ public enum DataType { private static final Collection STRING_TYPES = DataType.types().stream().filter(DataType::isString).toList(); - private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); + private static final Map NAME_TO_TYPE; private static final Map ES_TO_TYPE; @@ -287,6 +286,10 @@ public enum DataType { map.put("point", DataType.CARTESIAN_POINT); map.put("shape", DataType.CARTESIAN_SHAPE); ES_TO_TYPE = Collections.unmodifiableMap(map); + // DATETIME has different esType and typeName, add an entry in NAME_TO_TYPE with date as key + map = TYPES.stream().collect(toMap(DataType::typeName, t -> t)); + map.put("date", DataType.DATETIME); + NAME_TO_TYPE = Collections.unmodifiableMap(map); } private static final Map NAME_OR_ALIAS_TO_TYPE; diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle index aa19371685ce1..77497597a18c6 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -15,6 +15,7 @@ apply plugin: 'elasticsearch.bwc-test' dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) javaRestTestImplementation project(xpackModule('esql:qa:server')) + javaRestTestImplementation project(xpackModule('esql')) } def supportedVersion = bwcVersion -> { diff --git 
a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 62391c8ca001a..60eecbb7658b7 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -45,6 +45,10 @@ import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_SOURCE_INDICES; import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -101,16 +105,25 @@ public MultiClusterSpecIT( @Override protected void shouldSkipTest(String testName) throws IOException { + boolean remoteMetadata = testCase.requiredCapabilities.contains(METADATA_FIELDS_REMOTE_TEST.capabilityName()); + if (remoteMetadata) { + // remove the capability from the test to enable it + testCase.requiredCapabilities = testCase.requiredCapabilities.stream() + .filter(c -> c.equals("metadata_fields_remote_test") == false) + .toList(); + } super.shouldSkipTest(testName); checkCapabilities(remoteClusterClient(), remoteFeaturesService(), testName, testCase); - assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); + // Do not run tests including "METADATA _index" unless marked with metadata_fields_remote_test, + // because they may produce inconsistent results with multiple clusters. 
+ assumeFalse("can't test with _index metadata", (remoteMetadata == false) && hasIndexMetadata(testCase.query)); assumeTrue( "Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, instructions, Clusters.oldVersion()) ); - assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains("inlinestats")); - assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains("inlinestats_v2")); - assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains("join_planning_v1")); + assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); + assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); + assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -151,6 +164,9 @@ protected RestClient buildClient(Settings settings, HttpHost[] localHosts) throw return twoClients(localClient, remoteClient); } + // These indices are used in metadata tests so we want them on remote only for consistency + public static final List METADATA_INDICES = List.of("employees", "apps", "ul_logs"); + /** * Creates a new mock client that dispatches every request to both the local and remote clusters, excluding _bulk and _query requests. * - '_bulk' requests are randomly sent to either the local or remote cluster to populate data. Some spec tests, such as AVG, @@ -166,6 +182,8 @@ static RestClient twoClients(RestClient localClient, RestClient remoteClient) th String endpoint = request.getEndpoint(); if (endpoint.startsWith("/_query")) { return localClient.performRequest(request); + } else if (endpoint.endsWith("/_bulk") && METADATA_INDICES.stream().anyMatch(i -> endpoint.equals("/" + i + "/_bulk"))) { + return remoteClient.performRequest(request); } else if (endpoint.endsWith("/_bulk") && ENRICH_SOURCE_INDICES.stream().noneMatch(i -> endpoint.equals("/" + i + "/_bulk"))) { return bulkClient.performRequest(request); } else { @@ -203,6 +221,9 @@ static Request[] cloneRequests(Request orig, int numClones) throws IOException { return clones; } + /** + * Convert FROM employees ... 
=> FROM *:employees,employees + */ static CsvSpecReader.CsvTestCase convertToRemoteIndices(CsvSpecReader.CsvTestCase testCase) { String query = testCase.query; String[] commands = query.split("\\|"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index b8569ead94509..3be3decaf351c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -716,3 +716,47 @@ FROM employees 2 |1985-10-01T00:00:00.000Z 4 |1985-11-01T00:00:00.000Z ; + +bucketByWeekInString +required_capability: implicit_casting_string_literal_to_temporal_amount +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, "1 week") +| SORT week +; + + hires_per_week:long | week:date +2 |1985-02-18T00:00:00.000Z +1 |1985-05-13T00:00:00.000Z +1 |1985-07-08T00:00:00.000Z +1 |1985-09-16T00:00:00.000Z +2 |1985-10-14T00:00:00.000Z +4 |1985-11-18T00:00:00.000Z +; + +bucketByMinuteInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM sample_data +| STATS min = min(@timestamp), max = MAX(@timestamp) BY bucket = BUCKET(@timestamp, "30 minutes") +| SORT min +; + + min:date | max:date | bucket:date +2023-10-23T12:15:03.360Z|2023-10-23T12:27:28.948Z|2023-10-23T12:00:00.000Z +2023-10-23T13:33:34.937Z|2023-10-23T13:55:01.543Z|2023-10-23T13:30:00.000Z +; + +bucketByMonthInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM sample_data +| EVAL adjusted = CASE(TO_LONG(@timestamp) % 2 == 0, @timestamp + 1 month, @timestamp + 2 years) +| STATS c = COUNT(*) BY b = BUCKET(adjusted, "1 month") +| SORT c +; + +c:long |b:date +3 |2025-10-01T00:00:00.000Z +4 |2023-11-01T00:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 237c6a9af197f..7e7c561fac3a5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1286,3 +1286,108 @@ ROW a = GREATEST(TO_DATETIME("1957-05-23T00:00:00Z"), TO_DATETIME("1958-02-19T00 a:datetime 1958-02-19T00:00:00 ; + +evalDateTruncMonthInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| SORT hire_date +| EVAL x = date_trunc("1 month", hire_date) +| KEEP emp_no, hire_date, x +| LIMIT 5; + +emp_no:integer | hire_date:date | x:date +10009 | 1985-02-18T00:00:00.000Z | 1985-02-01T00:00:00.000Z +10048 | 1985-02-24T00:00:00.000Z | 1985-02-01T00:00:00.000Z +10098 | 1985-05-13T00:00:00.000Z | 1985-05-01T00:00:00.000Z +10076 | 1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z +10061 | 1985-09-17T00:00:00.000Z | 1985-09-01T00:00:00.000Z +; + +evalDateTruncHourInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| SORT hire_date +| EVAL x = date_trunc("240 hours", hire_date) +| KEEP emp_no, hire_date, x +| LIMIT 5; + +emp_no:integer | hire_date:date | x:date +10009 | 1985-02-18T00:00:00.000Z | 1985-02-11T00:00:00.000Z +10048 | 1985-02-24T00:00:00.000Z | 1985-02-21T00:00:00.000Z +10098 | 1985-05-13T00:00:00.000Z | 1985-05-12T00:00:00.000Z +10076 | 1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z +10061 | 1985-09-17T00:00:00.000Z | 
1985-09-09T00:00:00.000Z +; + +evalDateTruncDayInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC("1 day", @timestamp) +| KEEP t; + +t:date +2023-10-23T00:00:00 +2023-10-23T00:00:00 +2023-10-23T00:00:00 +2023-10-23T00:00:00 +2023-10-23T00:00:00 +2023-10-23T00:00:00 +2023-10-23T00:00:00 +; + +evalDateTruncMinuteInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC("1 minute", @timestamp) +| KEEP t; + +t:date +2023-10-23T12:15:00 +2023-10-23T12:27:00 +2023-10-23T13:33:00 +2023-10-23T13:51:00 +2023-10-23T13:52:00 +2023-10-23T13:53:00 +2023-10-23T13:55:00 +; + +evalDateTruncDayInStringNull +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| WHERE emp_no == 10040 +| EVAL x = date_trunc("1 day", birth_date) +| KEEP emp_no, birth_date, x; + +emp_no:integer | birth_date:date | x:date +10040 | null | null +; + +evalDateTruncYearInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +ROW a = 1 +| EVAL year_hired = DATE_TRUNC("1 year", "1991-06-26T00:00:00.000Z") +; + +a:integer | year_hired:date +1 | 1991-01-01T00:00:00.000Z +; + +filteringWithTemporalAmountInString +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| SORT emp_no +| WHERE birth_date < "2024-01-01" - 70 years +| STATS cnt = count(*); + +cnt:long +19 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec new file mode 100644 index 0000000000000..4d7ee9b1b5af6 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-remote.csv-spec @@ -0,0 +1,151 @@ +simpleKeep +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort _index desc, emp_no | limit 2 | keep emp_no, _index, _version; + +emp_no:integer |_index:keyword |_version:long +10001 |remote_cluster:employees |1 +10002 |remote_cluster:employees |1 +; + +aliasWithSameName +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort _index desc, emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; + +emp_no:integer |_index:keyword |_version:long +10001 |remote_cluster:employees |1 +10002 |remote_cluster:employees |1 +; + +inComparison +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort emp_no | where _index == "remote_cluster:employees" | where _version == 1 | keep emp_no | limit 2; + +emp_no:integer +10001 +10002 +; + +metaIndexInAggs +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +FROM employees METADATA _index, _id +| STATS max = MAX(emp_no) BY _index | SORT _index; + +max:integer |_index:keyword +10100 |remote_cluster:employees +; + +metaIndexAliasedInAggs +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i | SORT _i; + +max:integer |_i:keyword +10100 |remote_cluster:employees +; + +metaVersionInAggs +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees 
metadata _version | stats min = min(emp_no) by _version; + +min:integer |_version:long +10001 |1 +; + +metaVersionAliasedInAggs +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _version | eval _v = _version | stats min = min(emp_no) by _v; + +min:integer |_v:long +10001 |1 +; + +inAggsAndAsGroups +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | stats max = max(_version) by _index | SORT _index; + +max:long |_index:keyword +1 |remote_cluster:employees +; + +inAggsAndAsGroupsAliased +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | eval _i = _index, _v = _version | stats max = max(_v) by _i | SORT _i; + +max:long |_i:keyword +1 |remote_cluster:employees +; + +inFunction +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort emp_no | where length(_index) == length("remote_cluster:employees") | where abs(_version) == 1 | keep emp_no | limit 2; + +emp_no:integer +10001 +10002 +; + +inArithmetics +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | eval i = _version + 2 | stats min = min(emp_no) by i; + +min:integer |i:long +10001 |3 +; + +inSort +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort _version, _index desc, emp_no | keep emp_no, _version, _index | limit 2; + +emp_no:integer |_version:long |_index:keyword +10001 |1 |remote_cluster:employees +10002 |1 |remote_cluster:employees +; + +withMvFunction +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _version | eval i = mv_avg(_version) + 2 | stats min = min(emp_no) by i; + +min:integer |i:double +10001 |3.0 +; + +overwritten +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +from employees metadata _index, _version | sort emp_no | eval _index = 3, _version = "version" | keep emp_no, _index, _version | limit 3; + +emp_no:integer |_index:integer |_version:keyword +10001 |3 |version +10002 |3 |version +10003 |3 |version +; + +multipleIndices +required_capability: metadata_fields +required_capability: metadata_fields_remote_test +FROM ul_logs, apps METADATA _index, _version +| WHERE id IN (13, 14) AND _version == 1 +| EVAL key = CONCAT(_index, "_", TO_STR(id)) +| SORT id, _index +| KEEP id, _index, _version, key +; + + id:long |_index:keyword |_version:long |key:keyword +13 |remote_cluster:apps |1 |remote_cluster:apps_13 +13 |remote_cluster:ul_logs |1 |remote_cluster:ul_logs_13 +14 |remote_cluster:apps |1 |remote_cluster:apps_14 +14 |remote_cluster:ul_logs |1 |remote_cluster:ul_logs_14 + +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 0d6af0ec3bbb1..d2bee9c67af5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -476,12 +476,20 @@ public enum Cap { ADD_LIMIT_INSIDE_MV_EXPAND, DELAY_DEBUG_FN(Build.current().isSnapshot()), + + /** Capability for remote metadata test */ + 
METADATA_FIELDS_REMOTE_TEST(false), /** * WIP on Join planning * - Introduce BinaryPlan and co * - Refactor INLINESTATS and LOOKUP as a JOIN block */ - JOIN_PLANNING_V1(Build.current().isSnapshot()); + JOIN_PLANNING_V1(Build.current().isSnapshot()), + + /** + * Support implicit casting from string literal to DATE_PERIOD or TIME_DURATION. + */ + IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 9c173795d0ab1..562d42a94483f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.compute.data.Block; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -31,7 +30,6 @@ import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -49,6 +47,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FoldablesConvertFunction; @@ -61,6 +60,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.index.EsIndex; +import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plan.TableIdentifier; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Drop; @@ -86,6 +86,8 @@ import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import java.time.Duration; +import java.time.temporal.TemporalAmount; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; @@ -107,6 +109,7 @@ import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.FLOAT; 
import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; @@ -116,9 +119,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.maybeParseTemporalAmount; /** * This class is part of the planner. Resolves references (such as variable and index names) and performs implicit casting. @@ -142,9 +147,14 @@ public class Analyzer extends ParameterizedRuleExecutor ( "Resolution", + /* + * ImplicitCasting must be before ResolveRefs. Because a reference is created for a Bucket in Aggregate's aggregates, + * resolving this reference before implicit casting may cause this reference to have customMessage=true, it prevents further + * attempts to resolve this reference. + */ + new ImplicitCasting(), new ResolveRefs(), - new ResolveUnionTypes(), // Must be after ResolveRefs, so union types can be found - new ImplicitCasting() + new ResolveUnionTypes() // Must be after ResolveRefs, so union types can be found ); var finish = new Batch<>("Finish Analysis", Limiter.ONCE, new AddImplicitLimit(), new UnionTypesCleanup()); rules = List.of(init, resolution, finish); @@ -952,13 +962,15 @@ private BitSet gatherPreAnalysisMetrics(LogicalPlan plan, BitSet b) { } /** - * Cast string literals in ScalarFunction, EsqlArithmeticOperation, BinaryComparison and In to desired data types. + * Cast string literals in ScalarFunction, EsqlArithmeticOperation, BinaryComparison, In and GroupingFunction to desired data types. * For example, the string literals in the following expressions will be cast implicitly to the field data type on the left hand side. * date > "2024-08-21" * date in ("2024-08-21", "2024-08-22", "2024-08-23") * date = "2024-08-21" + 3 days * ip == "127.0.0.1" * version != "1.0" + * bucket(dateField, "1 month") + * date_trunc("1 minute", dateField) * * If the inputs to Coalesce are mixed numeric types, cast the rest of the numeric field or value to the first numeric data type if * applicable. 
For example, implicit casting converts: @@ -972,15 +984,18 @@ private BitSet gatherPreAnalysisMetrics(LogicalPlan plan, BitSet b) { private static class ImplicitCasting extends ParameterizedRule { @Override public LogicalPlan apply(LogicalPlan plan, AnalyzerContext context) { - return plan.transformExpressionsUp(ScalarFunction.class, e -> ImplicitCasting.cast(e, context.functionRegistry())); + return plan.transformExpressionsUp( + org.elasticsearch.xpack.esql.core.expression.function.Function.class, + e -> ImplicitCasting.cast(e, context.functionRegistry()) + ); } - private static Expression cast(ScalarFunction f, EsqlFunctionRegistry registry) { + private static Expression cast(org.elasticsearch.xpack.esql.core.expression.function.Function f, EsqlFunctionRegistry registry) { if (f instanceof In in) { return processIn(in); } - if (f instanceof EsqlScalarFunction esf) { - return processScalarFunction(esf, registry); + if (f instanceof EsqlScalarFunction || f instanceof GroupingFunction) { // exclude AggregateFunction until it is needed + return processScalarOrGroupingFunction(f, registry); } if (f instanceof EsqlArithmeticOperation || f instanceof BinaryComparison) { return processBinaryOperator((BinaryOperator) f); @@ -988,7 +1003,10 @@ private static Expression cast(ScalarFunction f, EsqlFunctionRegistry registry) return f; } - private static Expression processScalarFunction(EsqlScalarFunction f, EsqlFunctionRegistry registry) { + private static Expression processScalarOrGroupingFunction( + org.elasticsearch.xpack.esql.core.expression.function.Function f, + EsqlFunctionRegistry registry + ) { List args = f.arguments(); List targetDataTypes = registry.getDataTypeForStringLiteralConversion(f.getClass()); if (targetDataTypes == null || targetDataTypes.isEmpty()) { @@ -1011,9 +1029,11 @@ private static Expression processScalarFunction(EsqlScalarFunction f, EsqlFuncti } if (targetDataType != DataType.NULL && targetDataType != DataType.UNSUPPORTED) { Expression e = castStringLiteral(arg, targetDataType); - childrenChanged = true; - newChildren.add(e); - continue; + if (e != arg) { + childrenChanged = true; + newChildren.add(e); + continue; + } } } } else if (dataType.isNumeric() && canCastMixedNumericTypes(f) && castNumericArgs) { @@ -1095,7 +1115,7 @@ private static Expression processIn(In in) { return childrenChanged ? in.replaceChildren(newChildren) : in; } - private static boolean canCastMixedNumericTypes(EsqlScalarFunction f) { + private static boolean canCastMixedNumericTypes(org.elasticsearch.xpack.esql.core.expression.function.Function f) { return f instanceof Coalesce; } @@ -1142,19 +1162,37 @@ private static boolean supportsStringImplicitCasting(DataType type) { return type == DATETIME || type == IP || type == VERSION || type == BOOLEAN; } - public static Expression castStringLiteral(Expression from, DataType target) { + private static UnresolvedAttribute unresolvedAttribute(Expression value, String type, Exception e) { + String message = format( + "Cannot convert string [{}] to [{}], error [{}]", + value.fold(), + type, + (e instanceof ParsingException pe) ? pe.getErrorMessage() : e.getMessage() + ); + return new UnresolvedAttribute(value.source(), String.valueOf(value.fold()), message); + } + + private static Expression castStringLiteralToTemporalAmount(Expression from) { + try { + TemporalAmount result = maybeParseTemporalAmount(from.fold().toString().strip()); + if (result == null) { + return from; + } + DataType target = result instanceof Duration ? 
TIME_DURATION : DATE_PERIOD; + return new Literal(from.source(), result, target); + } catch (Exception e) { + return unresolvedAttribute(from, DATE_PERIOD + " or " + TIME_DURATION, e); + } + } + + private static Expression castStringLiteral(Expression from, DataType target) { assert from.foldable(); try { - Object to = EsqlDataTypeConverter.convert(from.fold(), target); - return new Literal(from.source(), to, target); + return isTemporalAmount(target) + ? castStringLiteralToTemporalAmount(from) + : new Literal(from.source(), EsqlDataTypeConverter.convert(from.fold(), target), target); } catch (Exception e) { - String message = LoggerMessageFormat.format( - "Cannot convert string [{}] to [{}], error [{}]", - from.fold(), - target, - e.getMessage() - ); - return new UnresolvedAttribute(from.source(), String.valueOf(from.fold()), message); + return unresolvedAttribute(from, target.toString(), e); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 632f52d163349..7be07a7659f66 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -688,7 +688,7 @@ private static void checkFullTextQueryFunctions(LogicalPlan plan, Set f plan, condition, Match.class, - lp -> (lp instanceof Limit == false), + lp -> (lp instanceof Limit == false) && (lp instanceof Aggregate == false), m -> "[" + m.functionName() + "] " + m.functionType(), failures ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index d1aef0e46caca..ca02441d2e1ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -160,27 +160,30 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; import static org.elasticsearch.xpack.esql.core.type.DataType.IP; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.core.type.DataType.isString; public class EsqlFunctionRegistry { - private static final Map , List > dataTypesForStringLiteralConversion = new LinkedHashMap<>(); + private static final Map 
, List > DATA_TYPES_FOR_STRING_LITERAL_CONVERSIONS = new LinkedHashMap<>(); - private static final Map dataTypeCastingPriority; + private static final Map DATA_TYPE_CASTING_PRIORITY; static { List typePriorityList = Arrays.asList( DATETIME, + DATE_PERIOD, + TIME_DURATION, DOUBLE, LONG, INTEGER, @@ -194,9 +197,9 @@ public class EsqlFunctionRegistry { UNSIGNED_LONG, UNSUPPORTED ); - dataTypeCastingPriority = new HashMap<>(); + DATA_TYPE_CASTING_PRIORITY = new HashMap<>(); for (int i = 0; i < typePriorityList.size(); i++) { - dataTypeCastingPriority.put(typePriorityList.get(i), i); + DATA_TYPE_CASTING_PRIORITY.put(typePriorityList.get(i), i); } } @@ -257,7 +260,7 @@ public Collection listFunctions(String pattern) { .collect(toList()); } - private FunctionDefinition[][] functions() { + private static FunctionDefinition[][] functions() { return new FunctionDefinition[][] { // grouping functions new FunctionDefinition[] { def(Bucket.class, Bucket::new, "bucket", "bin"), }, @@ -437,6 +440,11 @@ public static String normalizeName(String name) { } public record ArgSignature(String name, String[] type, String description, boolean optional, DataType targetDataType) { + + public ArgSignature(String name, String[] type, String description, boolean optional) { + this(name, type, description, optional, UNSUPPORTED); + } + @Override public String toString() { return "ArgSignature{" @@ -477,17 +485,24 @@ public List argDescriptions() { } } - public static DataType getTargetType(String[] names) { + /** + * Build a list target data types, which is used by ImplicitCasting to convert string literals to a target data type. + */ + private static DataType getTargetType(String[] names) { List types = new ArrayList<>(); for (String name : names) { - types.add(DataType.fromEs(name)); - } - if (types.contains(KEYWORD) || types.contains(TEXT)) { - return UNSUPPORTED; + DataType type = DataType.fromTypeName(name); + if (type != null && type != UNSUPPORTED) { // A type should not be null or UNSUPPORTED, just a sanity check here + // If the function takes strings as input, there is no need to cast a string literal to it. + // Return UNSUPPORTED means that ImplicitCasting doesn't support this argument, and it will be skipped by ImplicitCasting. + if (isString(type)) { + return UNSUPPORTED; + } + types.add(type); + } } - return types.stream() - .min((dt1, dt2) -> dataTypeCastingPriority.get(dt1).compareTo(dataTypeCastingPriority.get(dt2))) + .min((dt1, dt2) -> DATA_TYPE_CASTING_PRIORITY.get(dt1).compareTo(DATA_TYPE_CASTING_PRIORITY.get(dt2))) .orElse(UNSUPPORTED); } @@ -559,7 +574,7 @@ private void buildDataTypesForStringLiteralConversion(FunctionDefinition[]... gr for (FunctionDefinition[] group : groupFunctions) { for (FunctionDefinition def : group) { FunctionDescription signature = description(def); - dataTypesForStringLiteralConversion.put( + DATA_TYPES_FOR_STRING_LITERAL_CONVERSIONS.put( def.clazz(), signature.args().stream().map(EsqlFunctionRegistry.ArgSignature::targetDataType).collect(Collectors.toList()) ); @@ -568,7 +583,7 @@ private void buildDataTypesForStringLiteralConversion(FunctionDefinition[]... 
gr } public List getDataTypeForStringLiteralConversion(Class extends Function> clazz) { - return dataTypesForStringLiteralConversion.get(clazz); + return DATA_TYPES_FOR_STRING_LITERAL_CONVERSIONS.get(clazz); } private static class SnapshotFunctionRegistry extends EsqlFunctionRegistry { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index c9c292769b570..4bfc9ac5d848f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -274,27 +274,11 @@ public static TemporalAmount parseTemporalAmount(Object val, DataType expectedTy return null; } StringBuilder value = new StringBuilder(); - StringBuilder qualifier = new StringBuilder(); - StringBuilder nextBuffer = value; - boolean lastWasSpace = false; - for (char c : str.trim().toCharArray()) { - if (c == ' ') { - if (lastWasSpace == false) { - nextBuffer = nextBuffer == value ? qualifier : null; - } - lastWasSpace = true; - continue; - } - if (nextBuffer == null) { - throw new ParsingException(Source.EMPTY, errorMessage, val, expectedType); - } - nextBuffer.append(c); - lastWasSpace = false; - } - - if ((value.isEmpty() || qualifier.isEmpty()) == false) { + StringBuilder temporalUnit = new StringBuilder(); + separateValueAndTemporalUnitForTemporalAmount(str.strip(), value, temporalUnit, errorMessage, expectedType.toString()); + if ((value.isEmpty() || temporalUnit.isEmpty()) == false) { try { - TemporalAmount result = parseTemporalAmount(Integer.parseInt(value.toString()), qualifier.toString(), Source.EMPTY); + TemporalAmount result = parseTemporalAmount(Integer.parseInt(value.toString()), temporalUnit.toString(), Source.EMPTY); if (DataType.DATE_PERIOD == expectedType && result instanceof Period || DataType.TIME_DURATION == expectedType && result instanceof Duration) { return result; @@ -312,6 +296,48 @@ public static TemporalAmount parseTemporalAmount(Object val, DataType expectedTy throw new ParsingException(Source.EMPTY, errorMessage, val, expectedType); } + public static TemporalAmount maybeParseTemporalAmount(String str) { + // The string literal can be either Date_Period or Time_Duration, derive the data type from its temporal unit + String errorMessage = "Cannot parse [{}] to {}"; + String expectedTypes = DATE_PERIOD + " or " + TIME_DURATION; + StringBuilder value = new StringBuilder(); + StringBuilder temporalUnit = new StringBuilder(); + separateValueAndTemporalUnitForTemporalAmount(str, value, temporalUnit, errorMessage, expectedTypes); + if ((value.isEmpty() || temporalUnit.isEmpty()) == false) { + try { + return parseTemporalAmount(Integer.parseInt(value.toString()), temporalUnit.toString(), Source.EMPTY); + } catch (NumberFormatException ex) { + throw new ParsingException(Source.EMPTY, errorMessage, str, expectedTypes); + } + } + return null; + } + + private static void separateValueAndTemporalUnitForTemporalAmount( + String temporalAmount, + StringBuilder value, + StringBuilder temporalUnit, + String errorMessage, + String expectedType + ) { + StringBuilder nextBuffer = value; + boolean lastWasSpace = false; + for (char c : temporalAmount.toCharArray()) { + if (c == ' ') { + if (lastWasSpace == false) { + nextBuffer = nextBuffer == value ? 
temporalUnit : null; + } + lastWasSpace = true; + continue; + } + if (nextBuffer == null) { + throw new ParsingException(Source.EMPTY, errorMessage, temporalAmount, expectedType); + } + nextBuffer.append(c); + lastWasSpace = false; + } + } + /** * Converts arbitrary object to the desired data type. * @@ -394,10 +420,10 @@ public static DataType commonType(DataType left, DataType right) { } // generally supporting abbreviations from https://en.wikipedia.org/wiki/Unit_of_time - public static TemporalAmount parseTemporalAmount(Number value, String qualifier, Source source) throws InvalidArgumentException, + public static TemporalAmount parseTemporalAmount(Number value, String temporalUnit, Source source) throws InvalidArgumentException, ArithmeticException, ParsingException { try { - return switch (INTERVALS.valueOf(qualifier.toUpperCase(Locale.ROOT))) { + return switch (INTERVALS.valueOf(temporalUnit.toUpperCase(Locale.ROOT))) { case MILLISECOND, MILLISECONDS, MS -> Duration.ofMillis(safeToLong(value)); case SECOND, SECONDS, SEC, S -> Duration.ofSeconds(safeToLong(value)); case MINUTE, MINUTES, MIN -> Duration.ofMinutes(safeToLong(value)); @@ -410,7 +436,7 @@ public static TemporalAmount parseTemporalAmount(Number value, String qualifier, case YEAR, YEARS, YR, Y -> Period.ofYears(safeToInt(safeToLong(value))); }; } catch (IllegalArgumentException e) { - throw new ParsingException(source, "Unexpected time interval qualifier: '{}'", qualifier); + throw new ParsingException(source, "Unexpected temporal unit: '{}'", temporalUnit); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 0a34d6cd848bb..0e0c2de11fac3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1183,6 +1183,10 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception { "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") ); + assertEquals( + "1:47: [MATCH] function cannot be used after STATS", + error("from test | STATS c = AVG(salary) BY gender | where match(gender, \"F\")") + ); } public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception { @@ -1667,6 +1671,72 @@ public void testToDatePeriodToTimeDurationWithInvalidType() { ); } + public void testIntervalAsString() { + // DateTrunc + for (String interval : List.of("1 minu", "1 dy", "1.5 minutes", "0.5 days", "minutes 1", "day 5")) { + assertThat( + error("from types | EVAL x = date_trunc(\"" + interval + "\", \"1991-06-26T00:00:00.000Z\")"), + containsString("1:35: Cannot convert string [" + interval + "] to [DATE_PERIOD or TIME_DURATION]") + ); + assertThat( + error("from types | EVAL x = \"1991-06-26T00:00:00.000Z\", y = date_trunc(\"" + interval + "\", x::datetime)"), + containsString("1:67: Cannot convert string [" + interval + "] to [DATE_PERIOD or TIME_DURATION]") + ); + } + for (String interval : List.of("1", "0.5", "invalid")) { + assertThat( + error("from types | EVAL x = date_trunc(\"" + interval + "\", \"1991-06-26T00:00:00.000Z\")"), + containsString( + "1:24: first argument of [date_trunc(\"" + + interval + + "\", \"1991-06-26T00:00:00.000Z\")] must be [dateperiod or timeduration], found value [\"" + + interval + + "\"] type [keyword]" + ) + ); + assertThat( + 
error("from types | EVAL x = \"1991-06-26T00:00:00.000Z\", y = date_trunc(\"" + interval + "\", x::datetime)"), + containsString( + "1:56: first argument of [date_trunc(\"" + + interval + + "\", x::datetime)] " + + "must be [dateperiod or timeduration], found value [\"" + + interval + + "\"] type [keyword]" + ) + ); + } + + // Bucket + assertEquals( + "1:52: Cannot convert string [1 yar] to [DATE_PERIOD or TIME_DURATION], error [Unexpected temporal unit: 'yar']", + error("from test | stats max(emp_no) by bucket(hire_date, \"1 yar\")") + ); + assertEquals( + "1:52: Cannot convert string [1 hur] to [DATE_PERIOD or TIME_DURATION], error [Unexpected temporal unit: 'hur']", + error("from test | stats max(emp_no) by bucket(hire_date, \"1 hur\")") + ); + assertEquals( + "1:58: Cannot convert string [1 mu] to [DATE_PERIOD or TIME_DURATION], error [Unexpected temporal unit: 'mu']", + error("from test | stats max = max(emp_no) by bucket(hire_date, \"1 mu\") | sort max ") + ); + assertEquals( + "1:34: second argument of [bucket(hire_date, \"1\")] must be [integral, date_period or time_duration], " + + "found value [\"1\"] type [keyword]", + error("from test | stats max(emp_no) by bucket(hire_date, \"1\")") + ); + assertEquals( + "1:40: second argument of [bucket(hire_date, \"1\")] must be [integral, date_period or time_duration], " + + "found value [\"1\"] type [keyword]", + error("from test | stats max = max(emp_no) by bucket(hire_date, \"1\") | sort max ") + ); + assertEquals( + "1:68: second argument of [bucket(y, \"1\")] must be [integral, date_period or time_duration], " + + "found value [\"1\"] type [keyword]", + error("from test | eval x = emp_no, y = hire_date | stats max = max(x) by bucket(y, \"1\") | sort max ") + ); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 6a552f400d36e..181b8d52bf888 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -879,8 +879,7 @@ public static void renderDocs() throws IOException { "elseValue", trueValue.type(), "The value that's returned when no condition evaluates to `true`.", - true, - EsqlFunctionRegistry.getTargetType(trueValue.type()) + true ); description = new EsqlFunctionRegistry.FunctionDescription( description.name(), @@ -1085,8 +1084,7 @@ private static void renderDocsForOperators(String name) throws IOException { String[] type = paramInfo == null ? new String[] { "?" } : paramInfo.type(); String desc = paramInfo == null ? "" : paramInfo.description().replace('\n', ' '); boolean optional = paramInfo == null ? 
false : paramInfo.optional(); - DataType targetDataType = EsqlFunctionRegistry.getTargetType(type); - args.add(new EsqlFunctionRegistry.ArgSignature(paramName, type, desc, optional, targetDataType)); + args.add(new EsqlFunctionRegistry.ArgSignature(paramName, type, desc, optional)); } } renderKibanaFunctionDefinition(name, functionInfo, args, likeOrInOperator(name)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 67b4dd71260aa..0177747d27243 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -431,7 +431,7 @@ public void testDatePeriodLiterals() { } public void testUnknownNumericQualifier() { - assertParsingException(() -> whereExpression("1 decade"), "Unexpected time interval qualifier: 'decade'"); + assertParsingException(() -> whereExpression("1 decade"), "Unexpected temporal unit: 'decade'"); } public void testQualifiedDecimalLiteral() { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/26_aggs_bucket.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/26_aggs_bucket.yml index ea7684fb69a09..9fbe69ac05f0a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/26_aggs_bucket.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/26_aggs_bucket.yml @@ -234,3 +234,58 @@ - match: { values.2.1: "2024-08-01T00:00:00.000Z" } - match: { values.3.0: 1 } - match: { values.3.1: "2024-09-01T00:00:00.000Z" } + +--- +"Datetime interval as string": + - requires: + test_runner_features: [allowed_warnings_regex, capabilities] + capabilities: + - method: POST + path: /_query + parameters: [ ] + capabilities: [ implicit_casting_string_literal_to_temporal_amount ] + reason: "interval in parameters as string" + + - do: + indices.create: + index: test_bucket + body: + mappings: + properties: + ts : + type : date + + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "test_bucket" } } + - { "ts": "2024-06-16" } + - { "index": { "_index": "test_bucket" } } + - { "ts": "2024-07-16" } + - { "index": { "_index": "test_bucket" } } + - { "ts": "2024-08-16" } + - { "index": { "_index": "test_bucket" } } + - { "ts": "2024-09-16" } + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM test_bucket | STATS c = COUNT(*) BY b = BUCKET(ts, ?bucket) | SORT b' + params: [{"bucket" : "1 month"}] + + - match: { columns.0.name: c } + - match: { columns.0.type: long } + - match: { columns.1.name: b } + - match: { columns.1.type: date } + - length: { values: 4 } + - match: { values.0.0: 1 } + - match: { values.0.1: "2024-06-01T00:00:00.000Z" } + - match: { values.1.0: 1 } + - match: { values.1.1: "2024-07-01T00:00:00.000Z" } + - match: { values.2.0: 1 } + - match: { values.2.1: "2024-08-01T00:00:00.000Z" } + - match: { values.3.0: 1 } + - match: { values.3.1: "2024-09-01T00:00:00.000Z" }
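
The `RemainingTime.from` change earlier in this diff makes `TimeValue.MAX_VALUE` a special case whose remaining time never counts down. The sketch below mirrors that short-circuit using plain `java.time` types; the class name and `NO_TIMEOUT` sentinel are invented for the example, and this is not the Elasticsearch implementation, which works on `TimeValue` and an `Instant` supplier as shown in the patch.

```java
import java.time.Duration;
import java.time.Instant;
import java.util.function.Supplier;

/** Illustrative countdown supplier; a sketch of the MAX_VALUE short-circuit, not the ES class. */
final class Countdown {

    /** Sentinel meaning "no timeout", analogous to TimeValue.MAX_VALUE in the patch above. */
    static final Duration NO_TIMEOUT = Duration.ofSeconds(Long.MAX_VALUE);

    /** Returns a supplier of the time left until {@code total} elapses, never going below zero. */
    static Supplier<Duration> from(Supplier<Instant> clock, Duration total) {
        if (NO_TIMEOUT.equals(total)) {
            // The sentinel is checked before any date arithmetic: the remaining time stays constant,
            // and adding an effectively infinite duration to an Instant would exceed the maximum Instant.
            return () -> NO_TIMEOUT;
        }
        Instant deadline = clock.get().plus(total);
        return () -> {
            Duration left = Duration.between(clock.get(), deadline);
            return left.isNegative() ? Duration.ZERO : left;
        };
    }

    public static void main(String[] args) {
        Supplier<Duration> bounded = from(Instant::now, Duration.ofSeconds(30));
        Supplier<Duration> unbounded = from(Instant::now, NO_TIMEOUT);
        System.out.println("bounded remaining:   " + bounded.get());   // decreases on each call
        System.out.println("unbounded remaining: " + unbounded.get()); // always the sentinel
    }
}
```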
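The implicit casting of string literals such as `"1 week"` or `"30 minutes"` to `DATE_PERIOD` or `TIME_DURATION` rests on splitting the literal into a numeric value and a temporal unit (`maybeParseTemporalAmount` and `separateValueAndTemporalUnitForTemporalAmount` above), returning `null` for strings that do not have that shape so the caller can leave them untouched. Below is a rough standalone sketch of that rule; the class and method names are invented and the unit table is deliberately abbreviated, so treat it as an illustration of the parsing behaviour rather than the ES converter.

```java
import java.time.Duration;
import java.time.Period;
import java.time.temporal.TemporalAmount;
import java.util.Locale;

/** Sketch of parsing "<number> <unit>" into a Period (calendar units) or Duration (clock units). */
final class TemporalAmountSketch {

    /** Returns null when the input is not shaped like "<number> <unit>", so other casts can apply. */
    static TemporalAmount maybeParse(String text) {
        String[] parts = text.strip().split("\\s+");
        if (parts.length != 2) {
            return null;
        }
        final long value;
        try {
            value = Long.parseLong(parts[0]);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Cannot parse [" + text + "] to a date period or time duration", e);
        }
        return switch (parts[1].toLowerCase(Locale.ROOT)) {
            // calendar units -> Period (a DATE_PERIOD in ESQL terms)
            case "day", "days" -> Period.ofDays(Math.toIntExact(value));
            case "week", "weeks" -> Period.ofWeeks(Math.toIntExact(value));
            case "month", "months" -> Period.ofMonths(Math.toIntExact(value));
            case "year", "years" -> Period.ofYears(Math.toIntExact(value));
            // clock units -> Duration (a TIME_DURATION in ESQL terms)
            case "second", "seconds" -> Duration.ofSeconds(value);
            case "minute", "minutes" -> Duration.ofMinutes(value);
            case "hour", "hours" -> Duration.ofHours(value);
            default -> throw new IllegalArgumentException("Unexpected temporal unit: '" + parts[1] + "'");
        };
    }

    public static void main(String[] args) {
        System.out.println(maybeParse("1 week"));      // P7D
        System.out.println(maybeParse("30 minutes"));  // PT30M
        // A datetime literal has no "<number> <unit>" shape, so it is left for other casts:
        System.out.println(maybeParse("1991-06-26T00:00:00.000Z")); // null
    }
}
```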