From 38a34d969d530c4b3954c7adbf80dbe10373c93e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 13 Dec 2024 08:27:09 +1100 Subject: [PATCH 01/32] Mute org.elasticsearch.reservedstate.service.FileSettingsServiceTests testInvalidJSON #116521 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b750c0777ce34..95beeb7aa8f8d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -314,6 +314,9 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test011SecurityEnabledStatus issue: https://github.com/elastic/elasticsearch/issues/118517 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testInvalidJSON + issue: https://github.com/elastic/elasticsearch/issues/116521 # Examples: # From 0cc08a9196f75b3bcd630a55331578b1fc335b74 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 12 Dec 2024 22:39:21 +0100 Subject: [PATCH 02/32] Speedup Injector during concurrent node starts (#118588) Lets simplify this logic a little and lock on the injector instance instead of the class. Locking on the class actually wastes lots of time during test runs it turns out, especially with multi-cluster tests. 
--- .../elasticsearch/injection/guice/Binder.java | 4 +- .../injection/guice/BindingProcessor.java | 1 - .../injection/guice/InjectorBuilder.java | 3 +- .../injection/guice/Provider.java | 2 - .../elasticsearch/injection/guice/Scope.java | 59 -------------- .../elasticsearch/injection/guice/Scopes.java | 78 ++++--------------- .../internal/AbstractBindingBuilder.java | 2 +- .../injection/guice/internal/Scoping.java | 66 ++-------------- 8 files changed, 24 insertions(+), 191 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/injection/guice/Scope.java diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java index c34bebd10c2e1..d59edfce89183 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java @@ -65,9 +65,7 @@ * *

The {@link Provider} you use here does not have to be a "factory"; that * is, a provider which always creates each instance it provides. - * However, this is generally a good practice to follow. You can then use - * Guice's concept of {@link Scope scopes} to guide when creation should happen - * -- "letting Guice work for you". + * However, this is generally a good practice to follow. * *

  *     bind(Service.class).annotatedWith(Red.class).to(ServiceImpl.class);
diff --git a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java index 9223261ec2dd5..677f111c764a4 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java @@ -218,7 +218,6 @@ private void putBinding(BindingImpl binding) { MembersInjector.class, Module.class, Provider.class, - Scope.class, TypeLiteral.class ); // TODO(jessewilson): fix BuiltInModule, then add Stage diff --git a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java index 99d42faf6a803..fe9ac309e23f4 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.injection.guice.internal.Errors; import org.elasticsearch.injection.guice.internal.ErrorsException; import org.elasticsearch.injection.guice.internal.InternalContext; +import org.elasticsearch.injection.guice.internal.Scoping; import org.elasticsearch.injection.guice.internal.Stopwatch; import org.elasticsearch.injection.guice.spi.Dependency; @@ -154,7 +155,7 @@ public static void loadEagerSingletons(InjectorImpl injector, Errors errors) { } private static void loadEagerSingletons(InjectorImpl injector, final Errors errors, BindingImpl binding) { - if (binding.getScoping().isEagerSingleton()) { + if (binding.getScoping() == Scoping.EAGER_SINGLETON) { try { injector.callInContext(new ContextualCallable() { final Dependency dependency = Dependency.get(binding.getKey()); diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java index 692617239ea74..6de9d8ff9dc85 100644 --- 
a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java @@ -28,8 +28,6 @@ * instances, instances you wish to safely mutate and discard, instances which are out of scope * (e.g. using a {@code @RequestScoped} object from within a {@code @SessionScoped} object), or * instances that will be initialized lazily. - *
  • A custom {@link Scope} is implemented as a decorator of {@code Provider}, which decides - * when to delegate to the backing provider and when to provide the instance some other way. *
  • The {@link Injector} offers access to the {@code Provider} it uses to fulfill requests * for a given key, via the {@link Injector#getProvider} methods. * diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java b/server/src/main/java/org/elasticsearch/injection/guice/Scope.java deleted file mode 100644 index 681fc17bc6353..0000000000000 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.injection.guice; - -/** - * A scope is a level of visibility that instances provided by Guice may have. - * By default, an instance created by the {@link Injector} has no scope, - * meaning it has no state from the framework's perspective -- the - * {@code Injector} creates it, injects it once into the class that required it, - * and then immediately forgets it. Associating a scope with a particular - * binding allows the created instance to be "remembered" and possibly used - * again for other injections. - *

    - * An example of a scope is {@link Scopes#SINGLETON}. - * - * @author crazybob@google.com (Bob Lee) - */ -public interface Scope { - - /** - * Scopes a provider. The returned provider returns objects from this scope. - * If an object does not exist in this scope, the provider can use the given - * unscoped provider to retrieve one. - *

    - * Scope implementations are strongly encouraged to override - * {@link Object#toString} in the returned provider and include the backing - * provider's {@code toString()} output. - * - * @param unscoped locates an instance when one doesn't already exist in this - * scope. - * @return a new provider which only delegates to the given unscoped provider - * when an instance of the requested object doesn't already exist in this - * scope - */ - Provider scope(Provider unscoped); - - /** - * A short but useful description of this scope. For comparison, the standard - * scopes that ship with guice use the descriptions - * {@code "Scopes.SINGLETON"}, {@code "ServletScopes.SESSION"} and - * {@code "ServletScopes.REQUEST"}. - */ - @Override - String toString(); -} diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java index d5b61407b4975..5f05d0337654c 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java @@ -19,8 +19,6 @@ import org.elasticsearch.injection.guice.internal.InternalFactory; import org.elasticsearch.injection.guice.internal.Scoping; -import java.util.Locale; - /** * Built-in scope implementations. * @@ -31,29 +29,27 @@ public class Scopes { private Scopes() {} /** - * One instance per {@link Injector}. + * Scopes an internal factory. */ - public static final Scope SINGLETON = new Scope() { - @Override - public Provider scope(final Provider creator) { - return new Provider() { + static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { + return switch (scoping) { + case UNSCOPED -> creator; + case EAGER_SINGLETON -> new InternalFactoryToProviderAdapter<>(Initializables.of(new Provider<>() { private volatile T instance; - // DCL on a volatile is safe as of Java 5, which we obviously require. 
@Override - @SuppressWarnings("DoubleCheckedLocking") public T get() { if (instance == null) { /* - * Use a pretty coarse lock. We don't want to run into deadlocks - * when two threads try to load circularly-dependent objects. - * Maybe one of these days we will identify independent graphs of - * objects and offer to load them in parallel. - */ - synchronized (InjectorImpl.class) { + * Use a pretty coarse lock. We don't want to run into deadlocks + * when two threads try to load circularly-dependent objects. + * Maybe one of these days we will identify independent graphs of + * objects and offer to load them in parallel. + */ + synchronized (injector) { if (instance == null) { - instance = creator.get(); + instance = new ProviderToInternalFactoryAdapter<>(injector, creator).get(); } } } @@ -62,54 +58,10 @@ public T get() { @Override public String toString() { - return String.format(Locale.ROOT, "%s[%s]", creator, SINGLETON); + return creator + "[SINGLETON]"; } - }; - } - - @Override - public String toString() { - return "Scopes.SINGLETON"; - } - }; - - /** - * No scope; the same as not applying any scope at all. Each time the - * Injector obtains an instance of an object with "no scope", it injects this - * instance then immediately forgets it. When the next request for the same - * binding arrives it will need to obtain the instance over again. - *

    - * This exists only in case a class has been annotated with a scope - * annotation and you need to override this to "no scope" in your binding. - * - * @since 2.0 - */ - public static final Scope NO_SCOPE = new Scope() { - @Override - public Provider scope(Provider unscoped) { - return unscoped; - } - - @Override - public String toString() { - return "Scopes.NO_SCOPE"; - } - }; - - /** - * Scopes an internal factory. - */ - static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { - - if (scoping.isNoScope()) { - return creator; - } - - Scope scope = scoping.getScopeInstance(); - - // TODO: use diamond operator once JI-9019884 is fixed - Provider scoped = scope.scope(new ProviderToInternalFactoryAdapter(injector, creator)); - return new InternalFactoryToProviderAdapter<>(Initializables.of(scoped)); + })); + }; } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java index 28053c5f1d557..ee54c8aa93520 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java @@ -77,7 +77,7 @@ protected void checkNotScoped() { return; } - if (binding.getScoping().isExplicitlyScoped()) { + if (binding.getScoping() != Scoping.UNSCOPED) { binder.addError(SCOPE_ALREADY_SET); } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java index fcb03f34f4204..e1c04ea8e348f 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java @@ -16,8 +16,7 @@ package org.elasticsearch.injection.guice.internal; -import 
org.elasticsearch.injection.guice.Scope; -import org.elasticsearch.injection.guice.Scopes; +import org.elasticsearch.injection.guice.Injector; /** * References a scope, either directly (as a scope instance), or indirectly (as a scope annotation). @@ -25,69 +24,14 @@ * * @author jessewilson@google.com (Jesse Wilson) */ -public abstract class Scoping { - +public enum Scoping { /** * No scoping annotation has been applied. Note that this is different from {@code * in(Scopes.NO_SCOPE)}, where the 'NO_SCOPE' has been explicitly applied. */ - public static final Scoping UNSCOPED = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.NO_SCOPE; - } - - @Override - public String toString() { - return Scopes.NO_SCOPE.toString(); - } - - }; - - public static final Scoping EAGER_SINGLETON = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.SINGLETON; - } - - @Override - public String toString() { - return "eager singleton"; - } - - }; - + UNSCOPED, /** - * Returns true if this scope was explicitly applied. If no scope was explicitly applied then the - * scoping annotation will be used. + * One instance per {@link Injector}. */ - public boolean isExplicitlyScoped() { - return this != UNSCOPED; - } - - /** - * Returns true if this is the default scope. In this case a new instance will be provided for - * each injection. - */ - public boolean isNoScope() { - return getScopeInstance() == Scopes.NO_SCOPE; - } - - /** - * Returns true if this scope is a singleton that should be loaded eagerly in {@code stage}. - */ - public boolean isEagerSingleton() { - return this == EAGER_SINGLETON; - } - - /** - * Returns the scope instance, or {@code null} if that isn't known for this instance. 
- */ - public Scope getScopeInstance() { - return null; - } - - private Scoping() {} + EAGER_SINGLETON } From c449da8a8a955c127e832f4730a66109be0ed017 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 12 Dec 2024 15:14:56 -0700 Subject: [PATCH 03/32] Include hidden indices in DeprecationInfoAction (#118035) This fixes an issue where the deprecation API wouldn't include hidden indices by default. Resolves #118020 --- docs/changelog/118035.yaml | 6 ++++++ .../xpack/deprecation/DeprecationInfoAction.java | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/118035.yaml diff --git a/docs/changelog/118035.yaml b/docs/changelog/118035.yaml new file mode 100644 index 0000000000000..fdeaa184723b9 --- /dev/null +++ b/docs/changelog/118035.yaml @@ -0,0 +1,6 @@ +pr: 118035 +summary: Include hidden indices in `DeprecationInfoAction` +area: Indices APIs +type: bug +issues: + - 118020 diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 87d0bfb93e18c..7ad0758d99832 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -366,7 +366,7 @@ private static ClusterState removeSkippedSettings(ClusterState state, String[] i public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { - private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true); + private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true, true); private String[] indices; public Request(TimeValue masterNodeTimeout, String... 
indices) { From 2ab4d3d5ee27443deed6b71185bb7edb9d06b4da Mon Sep 17 00:00:00 2001 From: Svilen Mihaylov Date: Thu, 12 Dec 2024 21:25:57 -0500 Subject: [PATCH 04/32] Remove "use_field_mapping" in FieldFormat (#118513) The method in which it was parsed was unused. --- .../search/fetch/subphase/FieldAndFormat.java | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java index f623b3040f1c5..ef8769b688c64 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java @@ -12,8 +12,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -30,9 +28,6 @@ * display values of this field. 
*/ public final class FieldAndFormat implements Writeable, ToXContentObject { - private static final String USE_DEFAULT_FORMAT = "use_field_mapping"; - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(FetchDocValuesPhase.class); - public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField FORMAT_FIELD = new ParseField("format"); public static final ParseField INCLUDE_UNMAPPED_FIELD = new ParseField("include_unmapped"); @@ -48,28 +43,6 @@ public final class FieldAndFormat implements Writeable, ToXContentObject { PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), INCLUDE_UNMAPPED_FIELD); } - private static CheckedFunction ignoreUseFieldMappingStringParser() { - return (p) -> { - if (p.currentToken() == XContentParser.Token.VALUE_NULL) { - return null; - } else { - String text = p.text(); - if (text.equals(USE_DEFAULT_FORMAT)) { - DEPRECATION_LOGGER.compatibleCritical( - "explicit_default_format", - "[" - + USE_DEFAULT_FORMAT - + "] is a special format that was only used to " - + "ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore." - ); - return null; - } else { - return text; - } - } - }; - } - /** * Parse a {@link FieldAndFormat} from some {@link XContent}. 
*/ From 344cf15fb146ac7adb9189e54782bc38fd0e1bd9 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 13 Dec 2024 15:40:40 +1100 Subject: [PATCH 05/32] Add undeclared Azure settings, modify test to exercise them (#118634) --- docs/changelog/118634.yaml | 5 ++++ .../azure/AzureBlobStoreRepositoryTests.java | 23 +++++++++++++++++++ .../azure/AzureClientProvider.java | 5 ++++ .../azure/AzureRepositoryPlugin.java | 4 ++++ 4 files changed, 37 insertions(+) create mode 100644 docs/changelog/118634.yaml diff --git a/docs/changelog/118634.yaml b/docs/changelog/118634.yaml new file mode 100644 index 0000000000000..d798d94b72075 --- /dev/null +++ b/docs/changelog/118634.yaml @@ -0,0 +1,5 @@ +pr: 118634 +summary: "Add undeclared Azure settings, modify test to exercise them" +area: Snapshot/Restore +type: bug +issues: [] diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index bc1f07fda6240..f3101890d8185 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesService; @@ -41,6 +42,7 @@ import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.BackgroundIndexer; +import org.elasticsearch.threadpool.ThreadPool; import 
java.io.ByteArrayInputStream; import java.io.IOException; @@ -53,6 +55,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -75,6 +78,8 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg protected static final String DEFAULT_ACCOUNT_NAME = "account"; protected static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); protected static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); + private static final AtomicInteger MAX_CONNECTION_SETTING = new AtomicInteger(-1); + private static final AtomicInteger EVENT_LOOP_THREAD_COUNT_SETTING = new AtomicInteger(-1); @Override protected String repositoryType() { @@ -132,9 +137,17 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { // see com.azure.storage.blob.BlobUrlParts.parseIpUrl final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + httpServerUrl() + "/" + accountName; + + // The first node configured sets these for all nodes + MAX_CONNECTION_SETTING.compareAndSet(-1, randomIntBetween(10, 30)); + EVENT_LOOP_THREAD_COUNT_SETTING.compareAndSet(-1, randomIntBetween(1, 3)); return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) .put(AzureStorageSettings.ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace("test").getKey(), endpoint) + .put(AzureClientProvider.EVENT_LOOP_THREAD_COUNT.getKey(), EVENT_LOOP_THREAD_COUNT_SETTING.get()) + .put(AzureClientProvider.MAX_OPEN_CONNECTIONS.getKey(), MAX_CONNECTION_SETTING.get()) + .put(AzureClientProvider.MAX_IDLE_TIME.getKey(), TimeValue.timeValueSeconds(randomIntBetween(10, 30))) + .put(AzureClientProvider.OPEN_CONNECTION_TIMEOUT.getKey(), 
TimeValue.timeValueSeconds(randomIntBetween(10, 30))) .setSecureSettings(secureSettings) .build(); } @@ -262,6 +275,16 @@ private boolean isPutBlockList(String request) { } } + public void testSettingsTakeEffect() { + AzureClientProvider azureClientProvider = internalCluster().getInstance(AzureClientProvider.class); + assertEquals(MAX_CONNECTION_SETTING.get(), azureClientProvider.getConnectionProvider().maxConnections()); + ThreadPool nodeThreadPool = internalCluster().getInstance(ThreadPool.class); + assertEquals( + EVENT_LOOP_THREAD_COUNT_SETTING.get(), + nodeThreadPool.info(AzureRepositoryPlugin.NETTY_EVENT_LOOP_THREAD_POOL_NAME).getMax() + ); + } + public void testLargeBlobCountDeletion() throws Exception { int numberOfBlobs = randomIntBetween(257, 2000); try (BlobStore store = newBlobStore()) { diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java index f92bbcbdd716d..a9ae9db19a613 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java @@ -265,6 +265,11 @@ protected void doStop() { @Override protected void doClose() {} + // visible for testing + ConnectionProvider getConnectionProvider() { + return connectionProvider; + } + static class RequestMetrics { private volatile long totalRequestTimeNanos = 0; private volatile int requestCount; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index 4556e63378fea..3b945c8118804 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ 
b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -97,6 +97,10 @@ AzureStorageService createAzureStorageService(Settings settingsToUse, AzureClien @Override public List> getSettings() { return Arrays.asList( + AzureClientProvider.EVENT_LOOP_THREAD_COUNT, + AzureClientProvider.MAX_OPEN_CONNECTIONS, + AzureClientProvider.OPEN_CONNECTION_TIMEOUT, + AzureClientProvider.MAX_IDLE_TIME, AzureStorageSettings.ACCOUNT_SETTING, AzureStorageSettings.KEY_SETTING, AzureStorageSettings.SAS_TOKEN_SETTING, From 1b4f5eb36d06bd0ef8f517b4f8653e5dc253ecd7 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 13 Dec 2024 08:55:43 +0100 Subject: [PATCH 06/32] [Build] Fix Concurrency issue in buildparams access (#117552) Also provide caching support for buildparams provider * Extract BuildParameterExtension public api into interface * Make tests better readable * Fix test flakyness --- .../internal/PublishPluginFuncTest.groovy | 2 +- .../internal/ElasticsearchJavaBasePlugin.java | 2 +- .../internal/ElasticsearchTestBasePlugin.java | 5 +- .../InternalDistributionBwcSetupPlugin.java | 2 +- .../InternalDistributionDownloadPlugin.java | 5 +- .../internal/InternalTestClustersPlugin.java | 3 +- .../info/BuildParameterExtension.java | 211 ++++----------- .../info/DefaultBuildParameterExtension.java | 245 ++++++++++++++++++ .../internal/info/GlobalBuildInfoPlugin.java | 7 +- .../ThirdPartyAuditPrecommitPlugin.java | 4 +- .../SnykDependencyMonitoringGradlePlugin.java | 4 +- .../internal/test/TestWithSslPlugin.java | 2 +- .../AbstractYamlRestCompatTestPlugin.java | 3 +- .../info/BuildParameterExtensionSpec.groovy | 112 ++++++++ .../fixtures/AbstractGradleFuncTest.groovy | 2 +- 15 files changed, 419 insertions(+), 190 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java create mode 100644 
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy index a199ff9d3eac5..65f124e5f88e8 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy @@ -439,7 +439,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { // scm info only added for internal builds internalBuild() buildFile << """ - buildParams.getGitOriginProperty().set("https://some-repo.com/repo.git") + buildParams.setGitOrigin("https://some-repo.com/repo.git") apply plugin:'elasticsearch.java' apply plugin:'elasticsearch.publish' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index c897b142da2fb..ee0eb3f6eb2bf 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -132,7 +132,7 @@ private static void disableTransitiveDependenciesForSourceSet(Project project, S public void configureCompile(Project project) { project.getExtensions().getExtraProperties().set("compactProfile", "full"); JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); - if (buildParams.getJavaToolChainSpec().isPresent()) { + if (buildParams.getJavaToolChainSpec().getOrNull() != null) { java.toolchain(buildParams.getJavaToolChainSpec().get()); } 
java.setSourceCompatibility(buildParams.getMinimumRuntimeVersion()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 720d6a7c2efb6..240b55dedf7ce 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -13,7 +13,6 @@ import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; import org.elasticsearch.gradle.internal.test.SimpleCommandLineArgumentProvider; @@ -27,7 +26,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; -import org.gradle.api.provider.Property; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; @@ -56,8 +54,7 @@ public abstract class ElasticsearchTestBasePlugin implements Plugin { @Override public void apply(Project project) { project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); - Property buildParams = loadBuildParams(project); - + var buildParams = loadBuildParams(project); project.getPluginManager().apply(GradleTestPolicySetupPlugin.class); // for fips mode check project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java 
index c17127f9bbfcf..da26cb66122ad 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -66,7 +66,7 @@ public void apply(Project project) { project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); project.getPlugins().apply(JvmToolchainsPlugin.class); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); - BuildParameterExtension buildParams = loadBuildParams(project).get(); + var buildParams = loadBuildParams(project).get(); Boolean isCi = buildParams.isCi(); buildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { configureBwcProject( diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index ec694de8ec597..ba587aa4bd979 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -20,7 +20,6 @@ import org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportService; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.GradleException; @@ -49,7 +48,7 @@ public void apply(Project project) { // this is needed for isInternal project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); 
project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); - BuildParameterExtension buildParams = loadBuildParams(project).get(); + var buildParams = loadBuildParams(project).get(); DistributionDownloadPlugin distributionDownloadPlugin = project.getPlugins().apply(DistributionDownloadPlugin.class); Provider dockerSupport = GradleUtils.getBuildService( @@ -61,7 +60,7 @@ public void apply(Project project) { ); registerInternalDistributionResolutions( DistributionDownloadPlugin.getRegistrationsContainer(project), - buildParams.getBwcVersionsProperty() + buildParams.getBwcVersionsProvider() ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index 7e7ffad12a9a5..c618fe6c2e1bf 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -10,7 +10,6 @@ package org.elasticsearch.gradle.internal; import org.elasticsearch.gradle.VersionProperties; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; import org.elasticsearch.gradle.testclusters.TestClustersPlugin; @@ -26,7 +25,7 @@ public class InternalTestClustersPlugin implements Plugin { public void apply(Project project) { project.getPlugins().apply(InternalDistributionDownloadPlugin.class); project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); - BuildParameterExtension buildParams = loadBuildParams(project).get(); + var buildParams = loadBuildParams(project).get(); project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class); TestClustersPlugin testClustersPlugin = 
project.getPlugins().apply(TestClustersPlugin.class); testClustersPlugin.setRuntimeJava(buildParams.getRuntimeJavaHome()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java index 5531194e0abde..e80dc6ef1b44c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java @@ -13,175 +13,58 @@ import org.gradle.api.Action; import org.gradle.api.JavaVersion; import org.gradle.api.Task; -import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; -import org.gradle.api.provider.ProviderFactory; import org.gradle.jvm.toolchain.JavaToolchainSpec; import java.io.File; -import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.List; import java.util.Random; -import java.util.concurrent.atomic.AtomicReference; - -public abstract class BuildParameterExtension { - private final Provider inFipsJvm; - private final Provider runtimeJavaHome; - private final Boolean isRuntimeJavaHomeSet; - private final List javaVersions; - private final JavaVersion minimumCompilerVersion; - private final JavaVersion minimumRuntimeVersion; - private final JavaVersion gradleJavaVersion; - private final Provider runtimeJavaVersion; - private final Provider> javaToolChainSpec; - private final Provider runtimeJavaDetails; - private final String gitRevision; - private transient AtomicReference buildDate = new AtomicReference<>(); - private final String testSeed; - private final Boolean isCi; - private final Integer defaultParallel; - private final Boolean isSnapshotBuild; - - public BuildParameterExtension( - ProviderFactory providers, - Provider runtimeJavaHome, - Provider> javaToolChainSpec, - Provider runtimeJavaVersion, - boolean 
isRuntimeJavaHomeSet, - Provider runtimeJavaDetails, - List javaVersions, - JavaVersion minimumCompilerVersion, - JavaVersion minimumRuntimeVersion, - JavaVersion gradleJavaVersion, - String gitRevision, - String gitOrigin, - ZonedDateTime buildDate, - String testSeed, - boolean isCi, - int defaultParallel, - final boolean isSnapshotBuild, - Provider bwcVersions - ) { - this.inFipsJvm = providers.systemProperty("tests.fips.enabled").map(BuildParameterExtension::parseBoolean); - this.runtimeJavaHome = runtimeJavaHome; - this.javaToolChainSpec = javaToolChainSpec; - this.runtimeJavaVersion = runtimeJavaVersion; - this.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet; - this.runtimeJavaDetails = runtimeJavaDetails; - this.javaVersions = javaVersions; - this.minimumCompilerVersion = minimumCompilerVersion; - this.minimumRuntimeVersion = minimumRuntimeVersion; - this.gradleJavaVersion = gradleJavaVersion; - this.gitRevision = gitRevision; - this.testSeed = testSeed; - this.isCi = isCi; - this.defaultParallel = defaultParallel; - this.isSnapshotBuild = isSnapshotBuild; - this.getBwcVersionsProperty().set(bwcVersions); - this.getGitOriginProperty().set(gitOrigin); - } - - private static boolean parseBoolean(String s) { - if (s == null) { - return false; - } - return Boolean.parseBoolean(s); - } - - public boolean getInFipsJvm() { - return inFipsJvm.getOrElse(false); - } - - public Provider getRuntimeJavaHome() { - return runtimeJavaHome; - } - - public void withFipsEnabledOnly(Task task) { - task.onlyIf("FIPS mode disabled", task1 -> getInFipsJvm() == false); - } - - public Boolean getIsRuntimeJavaHomeSet() { - return isRuntimeJavaHomeSet; - } - - public List getJavaVersions() { - return javaVersions; - } - - public JavaVersion getMinimumCompilerVersion() { - return minimumCompilerVersion; - } - - public JavaVersion getMinimumRuntimeVersion() { - return minimumRuntimeVersion; - } - - public JavaVersion getGradleJavaVersion() { - return gradleJavaVersion; - } - - public 
Provider getRuntimeJavaVersion() { - return runtimeJavaVersion; - } - - public Provider> getJavaToolChainSpec() { - return javaToolChainSpec; - } - - public Provider getRuntimeJavaDetails() { - return runtimeJavaDetails; - } - - public String getGitRevision() { - return gitRevision; - } - - public String getGitOrigin() { - return getGitOriginProperty().get(); - } - - public ZonedDateTime getBuildDate() { - ZonedDateTime value = buildDate.get(); - if (value == null) { - value = ZonedDateTime.now(ZoneOffset.UTC); - if (buildDate.compareAndSet(null, value) == false) { - // If another thread initialized it first, return the initialized value - value = buildDate.get(); - } - } - return value; - } - - public String getTestSeed() { - return testSeed; - } - - public Boolean isCi() { - return isCi; - } - - public Integer getDefaultParallel() { - return defaultParallel; - } - - public Boolean isSnapshotBuild() { - return isSnapshotBuild; - } - - public BwcVersions getBwcVersions() { - return getBwcVersionsProperty().get(); - } - - public abstract Property getBwcVersionsProperty(); - - public abstract Property getGitOriginProperty(); - - public Random getRandom() { - return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16)); - } - - public Boolean isGraalVmRuntime() { - return runtimeJavaDetails.get().toLowerCase().contains("graalvm"); - } + +public interface BuildParameterExtension { + String EXTENSION_NAME = "buildParams"; + + boolean getInFipsJvm(); + + Provider getRuntimeJavaHome(); + + void withFipsEnabledOnly(Task task); + + Boolean getIsRuntimeJavaHomeSet(); + + List getJavaVersions(); + + JavaVersion getMinimumCompilerVersion(); + + JavaVersion getMinimumRuntimeVersion(); + + JavaVersion getGradleJavaVersion(); + + Provider getRuntimeJavaVersion(); + + Provider> getJavaToolChainSpec(); + + Provider getRuntimeJavaDetails(); + + String getGitRevision(); + + String getGitOrigin(); + + ZonedDateTime getBuildDate(); + + String getTestSeed(); + + Boolean isCi(); 
+ + Integer getDefaultParallel(); + + Boolean isSnapshotBuild(); + + BwcVersions getBwcVersions(); + + Provider getBwcVersionsProvider(); + + Random getRandom(); + + Boolean isGraalVmRuntime(); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java new file mode 100644 index 0000000000000..faac406d974c6 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.internal.info; + +import org.elasticsearch.gradle.internal.BwcVersions; +import org.gradle.api.Action; +import org.gradle.api.JavaVersion; +import org.gradle.api.Task; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; +import org.gradle.jvm.toolchain.JavaToolchainSpec; + +import java.io.File; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.List; +import java.util.Random; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +public abstract class DefaultBuildParameterExtension implements BuildParameterExtension { + private final Provider inFipsJvm; + private final Provider runtimeJavaHome; + private final Boolean isRuntimeJavaHomeSet; + private final List javaVersions; + private final JavaVersion minimumCompilerVersion; + private final JavaVersion minimumRuntimeVersion; + private final JavaVersion gradleJavaVersion; + private final Provider runtimeJavaVersion; + private final Provider> javaToolChainSpec; + private final Provider runtimeJavaDetails; + private final String gitRevision; + + private transient AtomicReference buildDate = new AtomicReference<>(); + private final String testSeed; + private final Boolean isCi; + private final Integer defaultParallel; + private final Boolean isSnapshotBuild; + + // not final for testing + private Provider bwcVersions; + private String gitOrigin; + + public DefaultBuildParameterExtension( + ProviderFactory providers, + Provider runtimeJavaHome, + Provider> javaToolChainSpec, + Provider runtimeJavaVersion, + boolean isRuntimeJavaHomeSet, + Provider runtimeJavaDetails, + List javaVersions, + JavaVersion minimumCompilerVersion, + JavaVersion minimumRuntimeVersion, + JavaVersion gradleJavaVersion, + String gitRevision, + String gitOrigin, + String testSeed, + boolean isCi, + int defaultParallel, + final boolean isSnapshotBuild, + Provider bwcVersions + ) { + this.inFipsJvm = 
providers.systemProperty("tests.fips.enabled").map(DefaultBuildParameterExtension::parseBoolean); + this.runtimeJavaHome = cache(providers, runtimeJavaHome); + this.javaToolChainSpec = cache(providers, javaToolChainSpec); + this.runtimeJavaVersion = cache(providers, runtimeJavaVersion); + this.isRuntimeJavaHomeSet = isRuntimeJavaHomeSet; + this.runtimeJavaDetails = cache(providers, runtimeJavaDetails); + this.javaVersions = javaVersions; + this.minimumCompilerVersion = minimumCompilerVersion; + this.minimumRuntimeVersion = minimumRuntimeVersion; + this.gradleJavaVersion = gradleJavaVersion; + this.gitRevision = gitRevision; + this.testSeed = testSeed; + this.isCi = isCi; + this.defaultParallel = defaultParallel; + this.isSnapshotBuild = isSnapshotBuild; + this.bwcVersions = cache(providers, bwcVersions); + this.gitOrigin = gitOrigin; + } + + // This is a workaround for https://github.com/gradle/gradle/issues/25550 + private Provider cache(ProviderFactory providerFactory, Provider incomingProvider) { + SingleObjectCache cache = new SingleObjectCache<>(); + return providerFactory.provider(() -> cache.computeIfAbsent(() -> incomingProvider.getOrNull())); + } + + private static boolean parseBoolean(String s) { + if (s == null) { + return false; + } + return Boolean.parseBoolean(s); + } + + @Override + public boolean getInFipsJvm() { + return inFipsJvm.getOrElse(false); + } + + @Override + public Provider getRuntimeJavaHome() { + return runtimeJavaHome; + } + + @Override + public void withFipsEnabledOnly(Task task) { + task.onlyIf("FIPS mode disabled", task1 -> getInFipsJvm() == false); + } + + @Override + public Boolean getIsRuntimeJavaHomeSet() { + return isRuntimeJavaHomeSet; + } + + @Override + public List getJavaVersions() { + return javaVersions; + } + + @Override + public JavaVersion getMinimumCompilerVersion() { + return minimumCompilerVersion; + } + + @Override + public JavaVersion getMinimumRuntimeVersion() { + return minimumRuntimeVersion; + } + + @Override + 
public JavaVersion getGradleJavaVersion() { + return gradleJavaVersion; + } + + @Override + public Provider getRuntimeJavaVersion() { + return runtimeJavaVersion; + } + + @Override + public Provider> getJavaToolChainSpec() { + return javaToolChainSpec; + } + + @Override + public Provider getRuntimeJavaDetails() { + return runtimeJavaDetails; + } + + @Override + public String getGitRevision() { + return gitRevision; + } + + @Override + public String getGitOrigin() { + return gitOrigin; + } + + @Override + public ZonedDateTime getBuildDate() { + ZonedDateTime value = buildDate.get(); + if (value == null) { + value = ZonedDateTime.now(ZoneOffset.UTC); + if (buildDate.compareAndSet(null, value) == false) { + // If another thread initialized it first, return the initialized value + value = buildDate.get(); + } + } + return value; + } + + @Override + public String getTestSeed() { + return testSeed; + } + + @Override + public Boolean isCi() { + return isCi; + } + + @Override + public Integer getDefaultParallel() { + return defaultParallel; + } + + @Override + public Boolean isSnapshotBuild() { + return isSnapshotBuild; + } + + @Override + public BwcVersions getBwcVersions() { + return bwcVersions.get(); + } + + @Override + public Random getRandom() { + return new Random(Long.parseUnsignedLong(testSeed.split(":")[0], 16)); + } + + @Override + public Boolean isGraalVmRuntime() { + return runtimeJavaDetails.get().toLowerCase().contains("graalvm"); + } + + private static class SingleObjectCache { + private T instance; + + public T computeIfAbsent(Supplier supplier) { + synchronized (this) { + if (instance == null) { + instance = supplier.get(); + } + return instance; + } + } + + public T get() { + return instance; + } + } + + public Provider getBwcVersionsProvider() { + return bwcVersions; + } + + // for testing; not part of public api + public void setBwcVersions(Provider bwcVersions) { + this.bwcVersions = bwcVersions; + } + + // for testing; not part of public api + public 
void setGitOrigin(String gitOrigin) { + this.gitOrigin = gitOrigin; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 27d2a66feb206..86f59aa0ab41e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -51,8 +51,6 @@ import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.file.Files; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -124,8 +122,10 @@ public void apply(Project project) { ); BuildParameterExtension buildParams = project.getExtensions() .create( - "buildParams", BuildParameterExtension.class, + BuildParameterExtension.EXTENSION_NAME, + DefaultBuildParameterExtension.class, + providers, actualRuntimeJavaHome, resolveToolchainSpecFromEnv(), actualRuntimeJavaHome.map( @@ -145,7 +145,6 @@ public void apply(Project project) { Jvm.current().getJavaVersion(), gitInfo.getRevision(), gitInfo.getOrigin(), - ZonedDateTime.now(ZoneOffset.UTC), getTestSeed(), System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null, ParallelDetector.findDefaultParallel(project), diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java index f70e25a57e331..e45a1d3dd25b1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -12,12 +12,10 @@ import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin; import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.component.ModuleComponentIdentifier; -import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskProvider; import java.io.File; @@ -34,7 +32,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { @Override public TaskProvider createTask(Project project) { project.getRootProject().getPlugins().apply(CompileOnlyResolvePlugin.class); - Property buildParams = loadBuildParams(project); + var buildParams = loadBuildParams(project); project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java index fa10daf8dfaaa..704394b4f01a9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/snyk/SnykDependencyMonitoringGradlePlugin.java @@ -10,7 +10,6 @@ package org.elasticsearch.gradle.internal.snyk; import org.elasticsearch.gradle.internal.conventions.info.GitInfo; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.gradle.api.Plugin; 
import org.gradle.api.Project; @@ -18,7 +17,6 @@ import org.gradle.api.file.ProjectLayout; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; -import org.gradle.api.provider.Property; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.SourceSet; @@ -41,7 +39,7 @@ public SnykDependencyMonitoringGradlePlugin(ProjectLayout projectLayout, Provide @Override public void apply(Project project) { project.getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); - Property buildParams = loadBuildParams(project); + var buildParams = loadBuildParams(project); var generateTaskProvider = project.getTasks() .register("generateSnykDependencyGraph", GenerateSnykDependencyGraph.class, generateSnykDependencyGraph -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java index 68711881b02f4..94018d1501e0b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java @@ -35,7 +35,7 @@ public class TestWithSslPlugin implements Plugin { @Override public void apply(Project project) { File keyStoreDir = new File(project.getBuildDir(), "keystore"); - BuildParameterExtension buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); + var buildParams = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class); TaskProvider exportKeyStore = project.getTasks() .register("copyTestCertificates", ExportElasticsearchBuildResourcesTask.class, (t) -> { t.copy("test/ssl/test-client.crt"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index ca669276123b3..b511702d1c7c3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -11,7 +11,6 @@ import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; -import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask; import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask; @@ -78,7 +77,7 @@ public AbstractYamlRestCompatTestPlugin(ProjectLayout projectLayout, FileOperati @Override public void apply(Project project) { project.getRootProject().getRootProject().getPlugins().apply(GlobalBuildInfoPlugin.class); - BuildParameterExtension buildParams = loadBuildParams(project).get(); + var buildParams = loadBuildParams(project).get(); final Path compatRestResourcesDir = Path.of("restResources").resolve("compat"); final Path compatSpecsDir = compatRestResourcesDir.resolve("yamlSpecs"); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy new file mode 100644 index 0000000000000..343268b9b4d47 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/info/BuildParameterExtensionSpec.groovy @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.info + +import spock.lang.Specification + +import org.elasticsearch.gradle.internal.BwcVersions +import org.gradle.api.JavaVersion +import org.gradle.api.Project +import org.gradle.api.provider.Provider +import org.gradle.api.provider.ProviderFactory +import org.gradle.testfixtures.ProjectBuilder +import org.junit.Assert + +import java.util.concurrent.CountDownLatch +import java.util.concurrent.Executors +import java.util.concurrent.Future +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicInteger + +import static org.junit.Assert.fail + +class BuildParameterExtensionSpec extends Specification { + + ProjectBuilder projectBuilder = new ProjectBuilder() + + def "#getterName is cached anc concurrently accessible"() { + given: + def project = projectBuilder.build() + def providers = project.getProviders(); + def buildParams = extension(project, providers) + int numberOfThreads = 10; + when: + var service = Executors.newFixedThreadPool(numberOfThreads) + var latch = new CountDownLatch(numberOfThreads) + def testedProvider = buildParams."$getterName"() + def futures = (1..numberOfThreads).collect { + service.submit( + () -> { + try { + testedProvider.get() + } catch (AssertionError e) { + latch.countDown() + Assert.fail("Accessing cached provider more than once") + } + latch.countDown() + } + ) + } + latch.await(10, TimeUnit.SECONDS) + + then: + futures.collect { it.state() }.any() { it == Future.State.FAILED } == false + + where: + getterName << [ + "getRuntimeJavaHome", + "getJavaToolChainSpec", + "getRuntimeJavaDetails", + "getRuntimeJavaVersion", + 
"getBwcVersionsProvider" + ] + } + + private BuildParameterExtension extension(Project project, ProviderFactory providers) { + return project.getExtensions().create( + BuildParameterExtension.class, "buildParameters", DefaultBuildParameterExtension.class, + providers, + providerMock(), + providerMock(), + providerMock(), + true, + providerMock(), + [ + Mock(JavaHome), + Mock(JavaHome), + ], + JavaVersion.VERSION_11, + JavaVersion.VERSION_11, + JavaVersion.VERSION_11, + "gitRevision", + "gitOrigin", + "testSeed", + false, + 5, + true, + // cannot use Mock here because of the way the provider is used by gradle internal property api + providerMock() + ) + } + + private Provider providerMock() { + Provider provider = Mock(Provider) + AtomicInteger counter = new AtomicInteger(0) + provider.getOrNull() >> { + println "accessing provider" + return counter.get() == 1 ? fail("Accessing cached provider more than once") : counter.incrementAndGet() + } + provider.get() >> { + fail("Accessing cached provider directly") + } + return provider + + } +} diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index 07214b5fbf845..fe23204d5601c 100644 --- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -183,7 +183,7 @@ abstract class AbstractGradleFuncTest extends Specification { ] BwcVersions versions = new BwcVersions(currentVersion, versionList, ['main', '8.x', '8.3', '8.2', '8.1', '7.16']) - buildParams.getBwcVersionsProperty().set(versions) + buildParams.setBwcVersions(project.provider { versions} ) """ } From 23008be7fba1af1c1ea038dda914530d795fa3c3 Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Fri, 13 Dec 
2024 10:33:33 +0100 Subject: [PATCH 07/32] [ML] Simplify minimum supported snapshot version handling for Machine Learning jobs (#118549) Since in 9.0 we don't need to support snapshots prior to 7.17, we can simplify the changes made in #81039 and re-introduce a single contant to manage the minimum supported snapshot version. --- .../elasticsearch/xpack/core/ml/MachineLearningField.java | 8 ++------ .../elasticsearch/xpack/deprecation/MlDeprecationIT.java | 2 +- .../xpack/deprecation/MlDeprecationChecker.java | 7 +++---- .../xpack/ml/integration/AnomalyJobCRUDIT.java | 2 +- .../xpack/ml/action/TransportOpenJobAction.java | 7 +++---- .../xpack/ml/job/task/OpenJobPersistentTasksExecutor.java | 7 +++---- 6 files changed, 13 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java index 3a37f94e6b2d4..a40babb2760fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningField.java @@ -64,12 +64,8 @@ public final class MachineLearningField { License.OperationMode.PLATINUM ); - // Ideally this would be 7.0.0, but it has to be 6.4.0 because due to an oversight it's impossible - // for the Java code to distinguish the model states for versions 6.4.0 to 7.9.3 inclusive. - public static final MlConfigVersion MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION = MlConfigVersion.fromString("6.4.0"); - // We tell the user we support model snapshots newer than 7.0.0 as that's the major version - // boundary, even though behind the scenes we have to support back to 6.4.0. - public static final MlConfigVersion MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION = MlConfigVersion.V_7_0_0; + // This is the last version when we changed the ML job snapshot format. 
+ public static final MlConfigVersion MIN_SUPPORTED_SNAPSHOT_VERSION = MlConfigVersion.V_8_3_0; private MachineLearningField() {} diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 6d95038e2cbcc..54a48ab34e991 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -63,7 +63,7 @@ public void testMlDeprecationChecks() throws Exception { indexDoc( ".ml-anomalies-.write-" + jobId, jobId + "_model_snapshot_2", - "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}" + "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.3.0\"}" ); client().performRequest(new Request("POST", "/.ml-anomalies-*/_refresh")); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java index c0e1c054f7a13..88adfe5157418 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/MlDeprecationChecker.java @@ -26,8 +26,7 @@ import java.util.Map; import java.util.Optional; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; public class 
MlDeprecationChecker implements DeprecationChecker { @@ -69,13 +68,13 @@ static Optional checkDataFeedAggregations(DatafeedConfig dataf } static Optional checkModelSnapshot(ModelSnapshot modelSnapshot) { - if (modelSnapshot.getMinVersion().before(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (modelSnapshot.getMinVersion().before(MIN_SUPPORTED_SNAPSHOT_VERSION)) { StringBuilder details = new StringBuilder( String.format( Locale.ROOT, "Delete model snapshot [%s] or update it to %s or greater.", modelSnapshot.getSnapshotId(), - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION + MIN_SUPPORTED_SNAPSHOT_VERSION ) ); if (modelSnapshot.getLatestRecordTimeStamp() != null) { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java index 08fda90f9fd73..8fe87b043c78b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnomalyJobCRUDIT.java @@ -195,7 +195,7 @@ public void testOpenJobWithOldSnapshot() { assertThat( ex.getMessage(), containsString( - "[open-job-with-old-model-snapshot] job model snapshot [snap_1] has min version before [7.0.0], " + "[open-job-with-old-model-snapshot] job model snapshot [snap_1] has min version before [8.3.0], " + "please revert to a newer model snapshot or reset the job" ) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index bd628c4e04ac6..6da5a110defbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -58,8 +58,7 @@ 
import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; import static org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutor.checkAssignmentState; /* @@ -214,7 +213,7 @@ public void onFailure(Exception e) { return; } assert modelSnapshot.getPage().results().size() == 1; - if (modelSnapshot.getPage().results().get(0).getMinVersion().onOrAfter(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (modelSnapshot.getPage().results().get(0).getMinVersion().onOrAfter(MIN_SUPPORTED_SNAPSHOT_VERSION)) { modelSnapshotValidationListener.onResponse(true); return; } @@ -224,7 +223,7 @@ public void onFailure(Exception e) { + "please revert to a newer model snapshot or reset the job", jobParams.getJobId(), jobParams.getJob().getModelSnapshotId(), - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION.toString() + MIN_SUPPORTED_SNAPSHOT_VERSION.toString() ) ); }, failure -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 89180cba77dfd..9c37ebc0abfd8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -73,8 +73,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static 
org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION; -import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION; +import static org.elasticsearch.xpack.core.ml.MachineLearningField.MIN_SUPPORTED_SNAPSHOT_VERSION; import static org.elasticsearch.xpack.core.ml.MlTasks.AWAITING_UPGRADE; import static org.elasticsearch.xpack.core.ml.MlTasks.PERSISTENT_TASK_MASTER_NODE_TIMEOUT; import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; @@ -436,7 +435,7 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener } assert snapshot.getPage().results().size() == 1; ModelSnapshot snapshotObj = snapshot.getPage().results().get(0); - if (snapshotObj.getMinVersion().onOrAfter(MIN_CHECKED_SUPPORTED_SNAPSHOT_VERSION)) { + if (snapshotObj.getMinVersion().onOrAfter(MIN_SUPPORTED_SNAPSHOT_VERSION)) { listener.onResponse(true); return; } @@ -446,7 +445,7 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener + "please revert to a newer model snapshot or reset the job", jobId, jobSnapshotId, - MIN_REPORTED_SUPPORTED_SNAPSHOT_VERSION.toString() + MIN_SUPPORTED_SNAPSHOT_VERSION.toString() ) ); }, snapshotFailure -> { From 4ff5acccbed76e154758de49c4b1866f781d721a Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 13 Dec 2024 10:51:58 +0100 Subject: [PATCH 08/32] ESQL: push down LIMIT past LOOKUP JOIN (#118495) Fix https://github.com/elastic/elasticsearch/issues/117698 by enabling push down of `LIMIT` past `LEFT JOIN`s. There is a subtle point here: our `LOOKUP JOIN` currently _exactly preserves the number of rows from the left hand side_. This is different from SQL, where `LEFT JOIN` will return _at least one row for each row from the left_, but may return multiple rows in case of multiple matches. We, instead, throw multiple matches into multi-values, instead. (C.f. 
[tests that I'm about to add](https://github.com/elastic/elasticsearch/pull/118471/files#diff-334f3328c5f066a093ed8a5ea4a62cd6bcdb304b660b15763bb4f64d0e87ed7cR365-R369) that demonstrate this.) If we were to change our semantics to match SQL's, we'd have to adjust the pushdown, too. --- .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 4 +- .../xpack/esql/ccq/MultiClusterSpecIT.java | 8 +-- .../src/main/resources/lookup-join.csv-spec | 59 ++++++++++++------- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../logical/PushDownAndCombineLimits.java | 7 ++- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 4 +- .../xpack/esql/analysis/VerifierTests.java | 2 +- .../optimizer/LogicalPlanOptimizerTests.java | 24 ++++++++ 9 files changed, 77 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 81070b3155f2e..1120a69cc5166 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -21,7 +21,7 @@ import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V4; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V5; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @@ -96,7 +96,7 @@ protected boolean supportsInferenceTestService() { @Override protected boolean supportsIndexModeLookup() throws IOException { - return 
hasCapabilities(List.of(JOIN_LOOKUP_V4.capabilityName())); + return hasCapabilities(List.of(JOIN_LOOKUP_V5.capabilityName())); } @Override diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 2ec75683ab149..5c7f981c93a97 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V4; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V5; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,7 +124,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", 
testCase.requiredCapabilities.contains(JOIN_LOOKUP_V4.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V5.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { @@ -283,8 +283,8 @@ protected boolean supportsInferenceTestService() { @Override protected boolean supportsIndexModeLookup() throws IOException { - // CCS does not yet support JOIN_LOOKUP_V4 and clusters falsely report they have this capability - // return hasCapabilities(List.of(JOIN_LOOKUP_V4.capabilityName())); + // CCS does not yet support JOIN_LOOKUP_V5 and clusters falsely report they have this capability + // return hasCapabilities(List.of(JOIN_LOOKUP_V5.capabilityName())); return false; } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index b01e12fa4f470..12e333c0ed9f2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -5,7 +5,7 @@ //TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) basicOnTheDataNode -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | EVAL language_code = languages @@ -22,7 +22,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -33,7 +33,7 @@ language_code:integer | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | SORT emp_no @@ -50,7 +50,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; subsequentEvalOnTheDataNode -required_capability: join_lookup_v4 
+required_capability: join_lookup_v5 FROM employees | EVAL language_code = languages @@ -68,7 +68,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM employees | SORT emp_no @@ -85,8 +85,25 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x 10003 | 4 | german | 8 ; +sortEvalBeforeLookup +required_capability: join_lookup_v5 + +FROM employees +| SORT emp_no +| EVAL language_code = (emp_no % 10) + 1 +| LOOKUP JOIN languages_lookup ON language_code +| KEEP emp_no, language_code, language_name +| LIMIT 3 +; + +emp_no:integer | language_code:integer | language_name:keyword +10001 | 2 | French +10002 | 3 | Spanish +10003 | 4 | German +; + lookupIPFromRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -97,7 +114,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowing -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | LOOKUP JOIN clientips_lookup ON client_ip @@ -108,7 +125,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -121,7 +138,7 @@ left | 172.21.0.5 | right | Development ; lookupIPFromRowWithShadowingKeepReordered -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" | EVAL client_ip = client_ip::keyword @@ -134,7 +151,7 @@ right | Development | 172.21.0.5 ; lookupIPFromIndex -required_capability: join_lookup_v4 +required_capability: 
join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -153,7 +170,7 @@ ignoreOrder:true ; lookupIPFromIndexKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -173,7 +190,7 @@ ignoreOrder:true ; lookupIPFromIndexStats -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -189,7 +206,7 @@ count:long | env:keyword ; lookupIPFromIndexStatsKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | EVAL client_ip = client_ip::keyword @@ -206,7 +223,7 @@ count:long | env:keyword ; lookupMessageFromRow -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -217,7 +234,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowing -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -228,7 +245,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromRowWithShadowingKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" | LOOKUP JOIN message_types_lookup ON message @@ -240,7 +257,7 @@ left | Connected to 10.1.0.1 | right | Success ; lookupMessageFromIndex -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -258,7 +275,7 @@ ignoreOrder:true ; lookupMessageFromIndexKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -277,7 +294,7 @@ 
ignoreOrder:true ; lookupMessageFromIndexKeepReordered -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -296,7 +313,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; lookupMessageFromIndexStats -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message @@ -311,7 +328,7 @@ count:long | type:keyword ; lookupMessageFromIndexStatsKeep -required_capability: join_lookup_v4 +required_capability: join_lookup_v5 FROM sample_data | LOOKUP JOIN message_types_lookup ON message diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 6436e049c7dd8..ddabb3e937dc2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -539,7 +539,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V4(Build.current().isSnapshot()), + JOIN_LOOKUP_V5(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index fb9d3f7e2f91e..1cacebdf27cd2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import 
org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule { @@ -63,8 +62,10 @@ public LogicalPlan rule(Limit limit) { } } } else if (limit.child() instanceof Join join) { - if (join.config().type() == JoinTypes.LEFT && join.right() instanceof LocalRelation) { - // This is a hash join from something like a lookup. + if (join.config().type() == JoinTypes.LEFT) { + // NOTE! This is only correct because our LEFT JOINs preserve the number of rows from the left hand side. + // This deviates from SQL semantics. In SQL, multiple matches on the right hand side lead to multiple rows in the output. + // For us, multiple matches on the right hand side are collected into multi-values. return join.replaceChildren(limit.replaceChild(join.left()), join.right()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 2834e5f3f8358..c11ef8615eb72 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public final void test() throws Throwable { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V4.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V5.capabilityName()) ); assumeFalse( "can't use TERM function in csv tests", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 30aec707df541..cfff245b19244 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2139,7 +2139,7 @@ public void testLookupMatchTypeWrong() { } public void testLookupJoinUnknownIndex() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); String errorMessage = "Unknown index [foobar]"; IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage); @@ -2168,7 +2168,7 @@ public void testLookupJoinUnknownIndex() { } public void testLookupJoinUnknownField() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name"; String errorMessage = "1:45: Unknown column [last_name] in right side of join"; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index e20f0d8bbc8ff..4b916106165fb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1964,7 +1964,7 @@ public void testSortByAggregate() { } public void testLookupJoinDataTypeMismatch() { - assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V4.isEnabled()); + assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V5.isEnabled()); query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 87bc11d8388bc..0cb805b05d845 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -112,7 +113,9 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -138,6 +141,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.TWO; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptySource; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.fieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getFieldAttribute; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; @@ -1291,6 +1295,26 @@ 
public void testCombineLimits() { ); } + public void testPushdownLimitsPastLeftJoin() { + var leftChild = emptySource(); + var rightChild = new LocalRelation(Source.EMPTY, List.of(fieldAttribute()), LocalSupplier.EMPTY); + assertNotEquals(leftChild, rightChild); + + var joinConfig = new JoinConfig(JoinTypes.LEFT, List.of(), List.of(), List.of()); + var join = switch (randomIntBetween(0, 2)) { + case 0 -> new Join(EMPTY, leftChild, rightChild, joinConfig); + case 1 -> new LookupJoin(EMPTY, leftChild, rightChild, joinConfig); + case 2 -> new InlineJoin(EMPTY, leftChild, rightChild, joinConfig); + default -> throw new IllegalArgumentException(); + }; + + var limit = new Limit(EMPTY, L(10), join); + + var optimizedPlan = new PushDownAndCombineLimits().rule(limit); + + assertEquals(join.replaceChildren(limit.replaceChild(join.left()), join.right()), optimizedPlan); + } + public void testMultipleCombineLimits() { var numberOfLimits = randomIntBetween(3, 10); var minimum = randomIntBetween(10, 99); From 67e3302bb404a00d92416bbc35f6166fc362b0e7 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Fri, 13 Dec 2024 11:09:58 +0100 Subject: [PATCH 09/32] [Connector APIs] Update yaml rest tests for Connector APIs (#118260) * [Connector API] Update yaml tests * Update tests --------- Co-authored-by: Elastic Machine --- .../entsearch/connector/10_connector_put.yml | 5 +- .../130_connector_update_index_name.yml | 26 +++++++ .../connector/140_connector_update_native.yml | 4 +- .../entsearch/connector/15_connector_post.yml | 5 +- .../entsearch/connector/20_connector_list.yml | 70 +++++++++---------- 5 files changed, 68 insertions(+), 42 deletions(-) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml index b0f850d09f76d..094d9cbf43089 100644 --- 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/10_connector_put.yml @@ -58,7 +58,7 @@ setup: connector.put: connector_id: test-connector-native body: - index_name: search-test + index_name: content-search-test is_native: true - match: { result: 'created' } @@ -68,7 +68,7 @@ setup: connector_id: test-connector-native - match: { id: test-connector-native } - - match: { index_name: search-test } + - match: { index_name: content-search-test } - match: { is_native: true } - match: { sync_now: false } - match: { status: needs_configuration } @@ -151,6 +151,7 @@ setup: is_native: false service_type: super-connector + --- 'Create Connector - Id returned as part of response': - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml index 4ffa5435a3d7b..f804dc02a9e01 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/130_connector_update_index_name.yml @@ -125,3 +125,29 @@ setup: connector_id: test-connector - match: { index_name: search-1-test } + + +--- +"Update Managed Connector Index Name": + - do: + connector.put: + connector_id: test-connector-1 + body: + is_native: true + service_type: super-connector + + - do: + connector.update_index_name: + connector_id: test-connector-1 + body: + index_name: content-search-2-test + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector-1 + + - match: { index_name: content-search-2-test } + 
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml index 77c57532ad479..f8cd24d175312 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/140_connector_update_native.yml @@ -7,7 +7,7 @@ setup: connector.put: connector_id: test-connector body: - index_name: search-1-test + index_name: content-search-1-test name: my-connector language: pl is_native: false @@ -29,7 +29,6 @@ setup: connector_id: test-connector - match: { is_native: true } - - match: { status: configured } - do: connector.update_native: @@ -44,7 +43,6 @@ setup: connector_id: test-connector - match: { is_native: false } - - match: { status: configured } --- "Update Connector Native - 404 when connector doesn't exist": diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml index 1cbff6a35e18b..634f99cd53fde 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/15_connector_post.yml @@ -71,7 +71,7 @@ setup: - do: connector.post: body: - index_name: search-test + index_name: content-search-test is_native: true - set: { id: id } @@ -82,7 +82,7 @@ setup: connector_id: $id - match: { id: $id } - - match: { index_name: search-test } + - match: { index_name: content-search-test } - match: { is_native: true } - match: 
{ sync_now: false } - match: { status: needs_configuration } @@ -102,6 +102,7 @@ setup: is_native: false service_type: super-connector + --- 'Create Connector - Index name used by another connector': - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml index 10e4620ca5603..697b0ee419181 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/20_connector_list.yml @@ -26,7 +26,7 @@ setup: connector.put: connector_id: connector-b body: - index_name: search-2-test + index_name: content-search-2-test name: my-connector-2 language: en is_native: true @@ -40,13 +40,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } @@ -62,9 +62,9 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-b" } - - match: { results.0.index_name: "search-2-test" } - - match: { results.0.language: "en" } + - match: { results.0.id: "connector-a" } + - match: { results.0.index_name: 
"search-1-test" } + - match: { results.0.language: "pl" } - match: { results.1.id: "connector-c" } - match: { results.1.index_name: "search-3-test" } @@ -79,13 +79,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } --- "List Connector - empty": @@ -118,11 +118,11 @@ setup: - do: connector.list: - index_name: search-1-test,search-2-test + index_name: search-1-test,content-search-2-test - match: { count: 2 } - - match: { results.0.index_name: "search-1-test" } - - match: { results.1.index_name: "search-2-test" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.1.index_name: "search-1-test" } --- @@ -147,8 +147,8 @@ setup: connector_name: my-connector-1,my-connector-2 - match: { count: 2 } - - match: { results.0.name: "my-connector-1" } - - match: { results.1.name: "my-connector-2" } + - match: { results.0.name: "my-connector-2" } + - match: { results.1.name: "my-connector-1" } --- @@ -156,10 +156,10 @@ setup: - do: connector.list: connector_name: my-connector-1,my-connector-2 - index_name: search-2-test + index_name: content-search-2-test - match: { count: 1 } - - match: { results.0.index_name: "search-2-test" } + - match: { results.0.index_name: "content-search-2-test" } - match: { results.0.name: "my-connector-2" } @@ -230,13 +230,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - 
match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } @@ -255,13 +255,13 @@ setup: - match: { count: 3 } # Alphabetical order by index_name for results - - match: { results.0.id: "connector-a" } - - match: { results.0.index_name: "search-1-test" } - - match: { results.0.language: "pl" } + - match: { results.0.id: "connector-b" } + - match: { results.0.index_name: "content-search-2-test" } + - match: { results.0.language: "en" } - - match: { results.1.id: "connector-b" } - - match: { results.1.index_name: "search-2-test" } - - match: { results.1.language: "en" } + - match: { results.1.id: "connector-a" } + - match: { results.1.index_name: "search-1-test" } + - match: { results.1.language: "pl" } - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } From 140d88c59a10074c5a0993dd66f31578a25f2360 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 13 Dec 2024 11:38:53 +0100 Subject: [PATCH 10/32] ESQL: Dependency check for binary plans (#118326) Make the dependency checker for query plans take into account binary plans and make sure that fields required from the left hand side are actually obtained from there (and analogously for the right). 
--- .../functions/description/categorize.asciidoc | 2 +- .../functions/kibana/definition/term.json | 2 +- .../esql/functions/kibana/docs/term.md | 8 +-- .../xpack/esql/index/EsIndex.java | 3 + .../xpack/esql/optimizer/LogicalVerifier.java | 3 +- .../esql/optimizer/PhysicalVerifier.java | 9 +-- .../rules/PlanConsistencyChecker.java | 62 +++++++++++++++---- .../xpack/esql/plan/logical/BinaryPlan.java | 5 ++ .../xpack/esql/plan/logical/join/Join.java | 11 ++++ .../esql/plan/physical/AggregateExec.java | 4 +- .../xpack/esql/plan/physical/BinaryExec.java | 5 ++ .../esql/plan/physical/HashJoinExec.java | 10 +++ .../esql/plan/physical/LookupJoinExec.java | 15 +++++ .../esql/analysis/AnalyzerTestUtils.java | 8 ++- .../optimizer/LogicalPlanOptimizerTests.java | 29 ++++++++- .../optimizer/PhysicalPlanOptimizerTests.java | 59 +++++++++++++++++- 16 files changed, 201 insertions(+), 34 deletions(-) diff --git a/docs/reference/esql/functions/description/categorize.asciidoc b/docs/reference/esql/functions/description/categorize.asciidoc index 32af0051e91c8..c956066ad53f3 100644 --- a/docs/reference/esql/functions/description/categorize.asciidoc +++ b/docs/reference/esql/functions/description/categorize.asciidoc @@ -8,4 +8,4 @@ Groups text messages into categories of similarly formatted text values. 
* can't be used within other expressions * can't be used with multiple groupings -* can't be used or referenced within aggregations +* can't be used or referenced within aggregate functions diff --git a/docs/reference/esql/functions/kibana/definition/term.json b/docs/reference/esql/functions/kibana/definition/term.json index d8bb61fd596a1..b0f129afd239c 100644 --- a/docs/reference/esql/functions/kibana/definition/term.json +++ b/docs/reference/esql/functions/kibana/definition/term.json @@ -78,7 +78,7 @@ } ], "examples" : [ - "from books \n| where term(author, \"gabriel\") \n| keep book_no, title\n| limit 3;" + "FROM books \n| WHERE TERM(author, \"gabriel\") \n| KEEP book_no, title\n| LIMIT 3;" ], "preview" : true, "snapshot_only" : true diff --git a/docs/reference/esql/functions/kibana/docs/term.md b/docs/reference/esql/functions/kibana/docs/term.md index 83e61a949208d..ffecd26d737f7 100644 --- a/docs/reference/esql/functions/kibana/docs/term.md +++ b/docs/reference/esql/functions/kibana/docs/term.md @@ -6,8 +6,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Performs a Term query on the specified field. Returns true if the provided term matches the row. ``` -from books -| where term(author, "gabriel") -| keep book_no, title -| limit 3; +FROM books +| WHERE TERM(author, "gabriel") +| KEEP book_no, title +| LIMIT 3; ``` diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java index ee51a6f391a65..d3fc9e15e2e04 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/index/EsIndex.java @@ -25,6 +25,9 @@ public class EsIndex implements Writeable { private final Map mapping; private final Map indexNameWithModes; + /** + * Intended for tests. Returns an index with an empty index mode map. 
+ */ public EsIndex(String name, Map mapping) { this(name, mapping, Map.of()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index 5e91425296822..dce828dbf192d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -14,7 +14,6 @@ public final class LogicalVerifier { - private static final PlanConsistencyChecker DEPENDENCY_CHECK = new PlanConsistencyChecker<>(); public static final LogicalVerifier INSTANCE = new LogicalVerifier(); private LogicalVerifier() {} @@ -25,7 +24,7 @@ public Failures verify(LogicalPlan plan) { Failures dependencyFailures = new Failures(); plan.forEachUp(p -> { - DEPENDENCY_CHECK.checkPlan(p, dependencyFailures); + PlanConsistencyChecker.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { p.forEachExpression(ex -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java index 9132cf87541bb..4ec90fc1ed50a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.optimizer.rules.PlanConsistencyChecker; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import 
org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -28,7 +27,6 @@ public final class PhysicalVerifier { public static final PhysicalVerifier INSTANCE = new PhysicalVerifier(); - private static final PlanConsistencyChecker DEPENDENCY_CHECK = new PlanConsistencyChecker<>(); private PhysicalVerifier() {} @@ -44,11 +42,6 @@ public Collection verify(PhysicalPlan plan) { } plan.forEachDown(p -> { - if (p instanceof AggregateExec agg) { - var exclude = Expressions.references(agg.ordinalAttributes()); - DEPENDENCY_CHECK.checkPlan(p, exclude, depFailures); - return; - } if (p instanceof FieldExtractExec fieldExtractExec) { Attribute sourceAttribute = fieldExtractExec.sourceAttribute(); if (sourceAttribute == null) { @@ -62,7 +55,7 @@ public Collection verify(PhysicalPlan plan) { ); } } - DEPENDENCY_CHECK.checkPlan(p, depFailures); + PlanConsistencyChecker.checkPlan(p, depFailures); }); if (depFailures.hasFailures()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java index 5101e3f73bfdf..d5bd110e8df74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PlanConsistencyChecker.java @@ -12,31 +12,42 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.plan.QueryPlan; +import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; +import org.elasticsearch.xpack.esql.plan.physical.BinaryExec; import java.util.HashSet; import java.util.Set; import static org.elasticsearch.xpack.esql.common.Failure.fail; -public class PlanConsistencyChecker

    > { +public class PlanConsistencyChecker { /** * Check whether a single {@link QueryPlan} produces no duplicate attributes and its children provide all of its required * {@link QueryPlan#references() references}. Otherwise, add * {@link org.elasticsearch.xpack.esql.common.Failure Failure}s to the {@link Failures} object. */ - public void checkPlan(P p, Failures failures) { - checkPlan(p, AttributeSet.EMPTY, failures); - } - - public void checkPlan(P p, AttributeSet exclude, Failures failures) { - AttributeSet refs = p.references(); - AttributeSet input = p.inputSet(); - AttributeSet missing = refs.subtract(input).subtract(exclude); - // TODO: for Joins, we should probably check if the required fields from the left child are actually in the left child, not - // just any child (and analogously for the right child). - if (missing.isEmpty() == false) { - failures.add(fail(p, "Plan [{}] optimized incorrectly due to missing references {}", p.nodeString(), missing)); + public static void checkPlan(QueryPlan p, Failures failures) { + if (p instanceof BinaryPlan binaryPlan) { + checkMissingBinary( + p, + binaryPlan.leftReferences(), + binaryPlan.left().outputSet(), + binaryPlan.rightReferences(), + binaryPlan.right().outputSet(), + failures + ); + } else if (p instanceof BinaryExec binaryExec) { + checkMissingBinary( + p, + binaryExec.leftReferences(), + binaryExec.left().outputSet(), + binaryExec.rightReferences(), + binaryExec.right().outputSet(), + failures + ); + } else { + checkMissing(p, p.references(), p.inputSet(), "missing references", failures); } Set outputAttributeNames = new HashSet<>(); @@ -49,4 +60,29 @@ public void checkPlan(P p, AttributeSet exclude, Failures failures) { } } } + + private static void checkMissingBinary( + QueryPlan plan, + AttributeSet leftReferences, + AttributeSet leftInput, + AttributeSet rightReferences, + AttributeSet rightInput, + Failures failures + ) { + checkMissing(plan, leftReferences, leftInput, "missing references from left 
hand side", failures); + checkMissing(plan, rightReferences, rightInput, "missing references from right hand side", failures); + } + + private static void checkMissing( + QueryPlan plan, + AttributeSet references, + AttributeSet input, + String detailErrorMessage, + Failures failures + ) { + AttributeSet missing = references.subtract(input); + if (missing.isEmpty() == false) { + failures.add(fail(plan, "Plan [{}] optimized incorrectly due to {} {}", plan.nodeString(), detailErrorMessage, missing)); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java index 91cd7f7a15840..dbd22dd297f88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.Arrays; @@ -30,6 +31,10 @@ public LogicalPlan right() { return right; } + public abstract AttributeSet leftReferences(); + + public abstract AttributeSet rightReferences(); + @Override public final BinaryPlan replaceChildren(List newChildren) { return replaceChildren(newChildren.get(0), newChildren.get(1)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index 6af29fb23b3bb..a2c159e506880 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -97,6 +98,16 @@ public List output() { return lazyOutput; } + @Override + public AttributeSet leftReferences() { + return Expressions.references(config().leftFields()); + } + + @Override + public AttributeSet rightReferences() { + return Expressions.references(config().rightFields()); + } + public List rightOutputFields() { AttributeSet leftInputs = left().outputSet(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 35f45250ed270..3c2d49567813c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -184,7 +184,9 @@ public List output() { @Override protected AttributeSet computeReferences() { - return mode.isInputPartial() ? new AttributeSet(intermediateAttributes) : Aggregate.computeReferences(aggregates, groupings); + return mode.isInputPartial() + ? 
new AttributeSet(intermediateAttributes) + : Aggregate.computeReferences(aggregates, groupings).subtract(new AttributeSet(ordinalAttributes())); } /** Returns the attributes that can be loaded from ordinals -- no explicit extraction is needed */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java index 6f200bad17a72..9a1b76205b595 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/BinaryExec.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.plan.physical; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.tree.Source; import java.io.IOException; @@ -40,6 +41,10 @@ public PhysicalPlan right() { return right; } + public abstract AttributeSet leftReferences(); + + public abstract AttributeSet rightReferences(); + @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 5ae3702993fcb..362c83bf76213 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -119,6 +119,16 @@ protected AttributeSet computeReferences() { return Expressions.references(leftFields); } + @Override + public AttributeSet leftReferences() { + return Expressions.references(leftFields); + } + + @Override + public AttributeSet rightReferences() { + return Expressions.references(rightFields); + } + @Override 
public HashJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { return new HashJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java index 26fd12447e664..2aff38993aa98 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -119,6 +119,21 @@ protected AttributeSet computeReferences() { return Expressions.references(leftFields); } + @Override + public AttributeSet leftReferences() { + return Expressions.references(leftFields); + } + + @Override + public AttributeSet rightReferences() { + // TODO: currently it's hard coded that we add all fields from the lookup index. But the output we "officially" get from the right + // hand side is inconsistent: + // - After logical optimization, there's a FragmentExec with an EsRelation on the right hand side with all the fields. + // - After local physical optimization, there's just an EsQueryExec here, with no fields other than _doc mentioned and we don't + // insert field extractions in the plan, either. 
+ return AttributeSet.EMPTY; + } + @Override public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { return new LookupJoinExec(source(), left, right, leftFields, rightFields, addedFields); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index 4e89a09db9ed4..5e79e40b7e938 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -104,6 +105,11 @@ public static LogicalPlan analyze(String query, String mapping, QueryParams para return analyzer.analyze(plan); } + public static IndexResolution loadMapping(String resource, String indexName, IndexMode indexMode) { + EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource), Map.of(indexName, indexMode)); + return IndexResolution.valid(test); + } + public static IndexResolution loadMapping(String resource, String indexName) { EsIndex test = new EsIndex(indexName, EsqlTestUtils.loadMapping(resource)); return IndexResolution.valid(test); @@ -118,7 +124,7 @@ public static IndexResolution expandedDefaultIndexResolution() { } public static IndexResolution defaultLookupResolution() { - return loadMapping("mapping-languages.json", "languages_lookup"); + return loadMapping("mapping-languages.json", "languages_lookup", IndexMode.LOOKUP); } public static EnrichResolution defaultEnrichResolution() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 0cb805b05d845..7e498eb6654b9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -149,6 +149,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultLookupResolution; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; @@ -221,7 +222,13 @@ public static void init() { EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(test); analyzer = new Analyzer( - new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), + new AnalyzerContext( + EsqlTestUtils.TEST_CFG, + new EsqlFunctionRegistry(), + getIndexResult, + defaultLookupResolution(), + enrichResolution + ), TEST_VERIFIER ); @@ -4896,6 +4903,26 @@ public void testPlanSanityCheck() throws Exception { assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } + public void testPlanSanityCheckWithBinaryPlans() throws Exception { + var plan = optimizedPlan(""" + FROM test + | RENAME languages AS language_code + | LOOKUP JOIN languages_lookup ON language_code + """); + + var project = as(plan, Project.class); + var limit = as(project.child(), Limit.class); + var join = as(limit.child(), Join.class); + + var joinWithInvalidLeftPlan = 
join.replaceChildren(join.right(), join.right()); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidLeftPlan)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from left hand side [language_code")); + + var joinWithInvalidRightPlan = join.replaceChildren(join.left(), join.left()); + e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(joinWithInvalidRightPlan)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from right hand side [language_code")); + } + // https://github.com/elastic/elasticsearch/issues/104995 public void testNoWrongIsNotNullPruning() { var plan = optimizedPlan(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 9682bb1c8b076..ec1d55a0fc58f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -115,6 +115,7 @@ import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -155,6 +156,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; 
+import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultLookupResolution; import static org.elasticsearch.xpack.esql.core.expression.Expressions.name; import static org.elasticsearch.xpack.esql.core.expression.Expressions.names; import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.l; @@ -281,16 +283,30 @@ TestDataSource makeTestDataSource( String indexName, String mappingFileName, EsqlFunctionRegistry functionRegistry, + IndexResolution lookupResolution, EnrichResolution enrichResolution, SearchStats stats ) { Map mapping = loadMapping(mappingFileName); EsIndex index = new EsIndex(indexName, mapping, Map.of("test", IndexMode.STANDARD)); IndexResolution getIndexResult = IndexResolution.valid(index); - Analyzer analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); + Analyzer analyzer = new Analyzer( + new AnalyzerContext(config, functionRegistry, getIndexResult, lookupResolution, enrichResolution), + TEST_VERIFIER + ); return new TestDataSource(mapping, index, analyzer, stats); } + TestDataSource makeTestDataSource( + String indexName, + String mappingFileName, + EsqlFunctionRegistry functionRegistry, + EnrichResolution enrichResolution, + SearchStats stats + ) { + return makeTestDataSource(indexName, mappingFileName, functionRegistry, defaultLookupResolution(), enrichResolution, stats); + } + TestDataSource makeTestDataSource( String indexName, String mappingFileName, @@ -2312,6 +2328,39 @@ public void testVerifierOnMissingReferences() { assertThat(e.getMessage(), containsString(" > 10[INTEGER]]] optimized incorrectly due to missing references [emp_no{f}#")); } + public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception { + // Do not assert serialization: + // This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator. 
+ var plan = physicalPlan(""" + FROM test + | RENAME languages AS language_code + | SORT language_code + | LOOKUP JOIN languages_lookup ON language_code + """, testData, false); + + var planWithInvalidJoinLeftSide = plan.transformUp(LookupJoinExec.class, join -> join.replaceChildren(join.right(), join.right())); + + var e = expectThrows(IllegalStateException.class, () -> physicalPlanOptimizer.verify(planWithInvalidJoinLeftSide)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from left hand side [language_code")); + + var planWithInvalidJoinRightSide = plan.transformUp( + LookupJoinExec.class, + // LookupJoinExec.rightReferences() is currently EMPTY (hack); use a HashJoinExec instead. + join -> new HashJoinExec( + join.source(), + join.left(), + join.left(), + join.leftFields(), + join.leftFields(), + join.rightFields(), + join.output() + ) + ); + + e = expectThrows(IllegalStateException.class, () -> physicalPlanOptimizer.verify(planWithInvalidJoinRightSide)); + assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references from right hand side [language_code")); + } + public void testVerifierOnDuplicateOutputAttributes() { var plan = physicalPlan(""" from test @@ -6863,11 +6912,17 @@ private PhysicalPlan physicalPlan(String query) { } private PhysicalPlan physicalPlan(String query, TestDataSource dataSource) { + return physicalPlan(query, dataSource, true); + } + + private PhysicalPlan physicalPlan(String query, TestDataSource dataSource, boolean assertSerialization) { var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); // System.out.println(physical); - assertSerialization(physical); + if (assertSerialization) { + assertSerialization(physical); + } return physical; } From 2be4cd983fb13a2903ad61ea2c6212aa31e39364 Mon Sep 17 00:00:00 2001 From: Gal Lalouche 
Date: Fri, 13 Dec 2024 12:41:24 +0200 Subject: [PATCH 11/32] ESQL: Support ST_EXTENT_AGG (#117451) This PR adds support for ST_EXTENT_AGG aggregation, i.e., computing a bounding box over a set of points/shapes (Cartesian or geo). Note the difference between this aggregation and the already implemented scalar function ST_EXTENT. This isn't a very efficient implementation, and future PRs will attempt to read these extents directly from the doc values. We currently always use longitude wrapping, i.e., we may wrap around the dateline for a smaller bounding box. Future PRs will let the user control this behavior. Fixes #104659. --- docs/changelog/117451.yaml | 6 + .../functions/aggregation-functions.asciidoc | 2 + .../description/st_extent_agg.asciidoc | 5 + .../functions/examples/st_extent_agg.asciidoc | 13 + .../kibana/definition/st_extent_agg.json | 61 +++++ .../functions/kibana/docs/st_extent_agg.md | 12 + .../functions/layout/st_extent_agg.asciidoc | 15 ++ .../parameters/st_extent_agg.asciidoc | 6 + .../functions/signature/st_extent_agg.svg | 1 + .../functions/types/st_extent_agg.asciidoc | 12 + .../utils/SpatialEnvelopeVisitor.java | 72 ++--- .../utils/SpatialEnvelopeVisitorTests.java | 12 +- ...esianPointDocValuesAggregatorFunction.java | 187 +++++++++++++ ...ntDocValuesAggregatorFunctionSupplier.java | 41 +++ ...ntDocValuesGroupingAggregatorFunction.java | 230 ++++++++++++++++ ...anPointSourceValuesAggregatorFunction.java | 192 ++++++++++++++ ...ourceValuesAggregatorFunctionSupplier.java | 41 +++ ...ourceValuesGroupingAggregatorFunction.java | 235 +++++++++++++++++ ...xtentCartesianShapeAggregatorFunction.java | 192 ++++++++++++++ ...tesianShapeAggregatorFunctionSupplier.java | 40 +++ ...tesianShapeGroupingAggregatorFunction.java | 235 +++++++++++++++++ ...ntGeoPointDocValuesAggregatorFunction.java | 201 ++++++++++++++ ...ntDocValuesAggregatorFunctionSupplier.java | 40 +++ ...ntDocValuesGroupingAggregatorFunction.java | 242 +++++++++++++++++ 
...eoPointSourceValuesAggregatorFunction.java | 206 +++++++++++++++ ...ourceValuesAggregatorFunctionSupplier.java | 41 +++ ...ourceValuesGroupingAggregatorFunction.java | 247 ++++++++++++++++++ ...atialExtentGeoShapeAggregatorFunction.java | 206 +++++++++++++++ ...entGeoShapeAggregatorFunctionSupplier.java | 40 +++ ...entGeoShapeGroupingAggregatorFunction.java | 247 ++++++++++++++++++ .../aggregation/AbstractArrayState.java | 4 +- .../spatial/CentroidPointAggregator.java | 7 + .../spatial/GeoPointEnvelopeVisitor.java | 63 +++++ .../aggregation/spatial/PointType.java | 107 ++++++++ .../spatial/SpatialAggregationUtils.java | 88 +++++++ ...roidCartesianPointDocValuesAggregator.java | 22 +- ...dCartesianPointSourceValuesAggregator.java | 20 +- ...alCentroidGeoPointDocValuesAggregator.java | 26 +- ...entroidGeoPointSourceValuesAggregator.java | 20 +- .../spatial/SpatialExtentAggregator.java | 36 +++ ...tentCartesianPointDocValuesAggregator.java | 42 +++ ...tCartesianPointSourceValuesAggregator.java | 45 ++++ ...SpatialExtentCartesianShapeAggregator.java | 43 +++ ...tialExtentGeoPointDocValuesAggregator.java | 45 ++++ ...lExtentGeoPointSourceValuesAggregator.java | 48 ++++ .../SpatialExtentGeoShapeAggregator.java | 46 ++++ .../spatial/SpatialExtentGroupingState.java | 154 +++++++++++ ...entGroupingStateWrappedLongitudeState.java | 182 +++++++++++++ ...tialExtentLongitudeWrappingAggregator.java | 62 +++++ .../spatial/SpatialExtentState.java | 82 ++++++ ...atialExtentStateWrappedLongitudeState.java | 91 +++++++ .../mapping-airports_no_doc_values.json | 4 +- .../src/main/resources/spatial.csv-spec | 99 +++++++ .../xpack/esql/action/EsqlCapabilities.java | 3 + .../function/EsqlFunctionRegistry.java | 2 + .../aggregate/AggregateWritables.java | 1 + .../aggregate/SpatialAggregateFunction.java | 31 ++- .../function/aggregate/SpatialCentroid.java | 43 ++- .../function/aggregate/SpatialExtent.java | 119 +++++++++ .../function/scalar/spatial/StEnvelope.java | 3 +- 
.../function/scalar/spatial/StXMax.java | 3 +- .../function/scalar/spatial/StXMin.java | 3 +- .../function/scalar/spatial/StYMax.java | 3 +- .../function/scalar/spatial/StYMin.java | 3 +- .../xpack/esql/planner/AggregateMapper.java | 52 +++- .../esql/expression/RectangleMatcher.java | 61 +++++ .../WellKnownBinaryBytesRefMatcher.java | 45 ++++ .../function/AbstractAggregationTestCase.java | 8 +- .../function/MultiRowTestCaseSupplier.java | 85 +++--- .../function/aggregate/CountTests.java | 5 +- .../aggregate/SpatialCentroidTests.java | 5 +- .../aggregate/SpatialExtentTests.java | 102 ++++++++ .../scalar/spatial/StEnvelopeTests.java | 5 +- .../function/scalar/spatial/StXMaxTests.java | 5 +- .../function/scalar/spatial/StXMinTests.java | 5 +- .../function/scalar/spatial/StYMaxTests.java | 5 +- .../function/scalar/spatial/StYMinTests.java | 5 +- .../optimizer/PhysicalPlanOptimizerTests.java | 135 +++++++++- .../rest-api-spec/test/esql/60_usage.yml | 4 +- 79 files changed, 4923 insertions(+), 234 deletions(-) create mode 100644 docs/changelog/117451.yaml create mode 100644 docs/reference/esql/functions/description/st_extent_agg.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_extent_agg.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/st_extent_agg.json create mode 100644 docs/reference/esql/functions/kibana/docs/st_extent_agg.md create mode 100644 docs/reference/esql/functions/layout/st_extent_agg.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_extent_agg.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_extent_agg.svg create mode 100644 docs/reference/esql/functions/types/st_extent_agg.asciidoc create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java create mode 100644 
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java diff --git a/docs/changelog/117451.yaml b/docs/changelog/117451.yaml new file mode 100644 index 0000000000000..bda0ca59e0953 --- /dev/null +++ b/docs/changelog/117451.yaml @@ -0,0 +1,6 @@ +pr: 117451 +summary: ST_EXTENT aggregation +area: ES|QL +type: feature +issues: + - 104659 diff --git 
a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index c2c2508ad5de2..24b42a6efd831 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -17,6 +17,7 @@ The <> command supports these aggregate functions: * <> * <> * experimental:[] <> +* experimental:[] <> * <> * <> * <> @@ -33,6 +34,7 @@ include::layout/median_absolute_deviation.asciidoc[] include::layout/min.asciidoc[] include::layout/percentile.asciidoc[] include::layout/st_centroid_agg.asciidoc[] +include::layout/st_extent_agg.asciidoc[] include::layout/std_dev.asciidoc[] include::layout/sum.asciidoc[] include::layout/top.asciidoc[] diff --git a/docs/reference/esql/functions/description/st_extent_agg.asciidoc b/docs/reference/esql/functions/description/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..a9e1acfb0e6fb --- /dev/null +++ b/docs/reference/esql/functions/description/st_extent_agg.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field. diff --git a/docs/reference/esql/functions/examples/st_extent_agg.asciidoc b/docs/reference/esql/functions/examples/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..179be82103641 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_extent_agg.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_extent_agg-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_extent_agg-airports-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/st_extent_agg.json b/docs/reference/esql/functions/kibana/definition/st_extent_agg.json new file mode 100644 index 0000000000000..19afcc59e38a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_extent_agg.json @@ -0,0 +1,61 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "st_extent_agg", + "description" : "Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ], + "examples" : [ + "FROM airports\n| WHERE country == \"India\"\n| STATS extent = ST_EXTENT_AGG(location)" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/st_extent_agg.md b/docs/reference/esql/functions/kibana/docs/st_extent_agg.md new file mode 100644 index 0000000000000..a2e307c5b2c55 --- /dev/null 
+++ b/docs/reference/esql/functions/kibana/docs/st_extent_agg.md @@ -0,0 +1,12 @@ + + +### ST_EXTENT_AGG +Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field. + +``` +FROM airports +| WHERE country == "India" +| STATS extent = ST_EXTENT_AGG(location) +``` diff --git a/docs/reference/esql/functions/layout/st_extent_agg.asciidoc b/docs/reference/esql/functions/layout/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..946bef661e70c --- /dev/null +++ b/docs/reference/esql/functions/layout/st_extent_agg.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_extent_agg]] +=== `ST_EXTENT_AGG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_extent_agg.svg[Embedded,opts=inline] + +include::../parameters/st_extent_agg.asciidoc[] +include::../description/st_extent_agg.asciidoc[] +include::../types/st_extent_agg.asciidoc[] +include::../examples/st_extent_agg.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc b/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..8903aa1a472a3 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_extent_agg.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/signature/st_extent_agg.svg b/docs/reference/esql/functions/signature/st_extent_agg.svg new file mode 100644 index 0000000000000..bb19b68bfb08b --- /dev/null +++ b/docs/reference/esql/functions/signature/st_extent_agg.svg @@ -0,0 +1 @@ +ST_EXTENT_AGG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/st_extent_agg.asciidoc b/docs/reference/esql/functions/types/st_extent_agg.asciidoc new file mode 100644 index 0000000000000..c836aa1896f07 --- /dev/null +++ b/docs/reference/esql/functions/types/st_extent_agg.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | result +cartesian_point | cartesian_shape +cartesian_shape | cartesian_shape +geo_point | geo_shape +geo_shape | geo_shape +|=== diff --git a/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java b/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java index eee4a62c7d588..696be2808ed1f 100644 --- a/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java +++ b/libs/geo/src/main/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitor.java @@ -83,10 +83,15 @@ public static Optional visitCartesian(Geometry geometry) { return Optional.empty(); } + public enum WrapLongitude { + NO_WRAP, + WRAP + } + /** * Determine the BBOX assuming the CRS is geographic (eg WGS84) and optionally wrapping the longitude around the dateline. 
*/ - public static Optional visitGeo(Geometry geometry, boolean wrapLongitude) { + public static Optional visitGeo(Geometry geometry, WrapLongitude wrapLongitude) { var visitor = new SpatialEnvelopeVisitor(new GeoPointVisitor(wrapLongitude)); if (geometry.visit(visitor)) { return Optional.of(visitor.getResult()); @@ -181,40 +186,16 @@ public Rectangle getResult() { * */ public static class GeoPointVisitor implements PointVisitor { - private double minY = Double.POSITIVE_INFINITY; - private double maxY = Double.NEGATIVE_INFINITY; - private double minNegX = Double.POSITIVE_INFINITY; - private double maxNegX = Double.NEGATIVE_INFINITY; - private double minPosX = Double.POSITIVE_INFINITY; - private double maxPosX = Double.NEGATIVE_INFINITY; - - public double getMinY() { - return minY; - } - - public double getMaxY() { - return maxY; - } - - public double getMinNegX() { - return minNegX; - } + protected double minY = Double.POSITIVE_INFINITY; + protected double maxY = Double.NEGATIVE_INFINITY; + protected double minNegX = Double.POSITIVE_INFINITY; + protected double maxNegX = Double.NEGATIVE_INFINITY; + protected double minPosX = Double.POSITIVE_INFINITY; + protected double maxPosX = Double.NEGATIVE_INFINITY; - public double getMaxNegX() { - return maxNegX; - } - - public double getMinPosX() { - return minPosX; - } + private final WrapLongitude wrapLongitude; - public double getMaxPosX() { - return maxPosX; - } - - private final boolean wrapLongitude; - - public GeoPointVisitor(boolean wrapLongitude) { + public GeoPointVisitor(WrapLongitude wrapLongitude) { this.wrapLongitude = wrapLongitude; } @@ -253,32 +234,35 @@ public Rectangle getResult() { return getResult(minNegX, minPosX, maxNegX, maxPosX, maxY, minY, wrapLongitude); } - private static Rectangle getResult( + protected static Rectangle getResult( double minNegX, double minPosX, double maxNegX, double maxPosX, double maxY, double minY, - boolean wrapLongitude + WrapLongitude wrapLongitude ) { assert 
Double.isFinite(maxY); if (Double.isInfinite(minPosX)) { return new Rectangle(minNegX, maxNegX, maxY, minY); } else if (Double.isInfinite(minNegX)) { return new Rectangle(minPosX, maxPosX, maxY, minY); - } else if (wrapLongitude) { - double unwrappedWidth = maxPosX - minNegX; - double wrappedWidth = (180 - minPosX) - (-180 - maxNegX); - if (unwrappedWidth <= wrappedWidth) { - return new Rectangle(minNegX, maxPosX, maxY, minY); - } else { - return new Rectangle(minPosX, maxNegX, maxY, minY); - } } else { - return new Rectangle(minNegX, maxPosX, maxY, minY); + return switch (wrapLongitude) { + case NO_WRAP -> new Rectangle(minNegX, maxPosX, maxY, minY); + case WRAP -> maybeWrap(minNegX, minPosX, maxNegX, maxPosX, maxY, minY); + }; } } + + private static Rectangle maybeWrap(double minNegX, double minPosX, double maxNegX, double maxPosX, double maxY, double minY) { + double unwrappedWidth = maxPosX - minNegX; + double wrappedWidth = 360 + maxNegX - minPosX; + return unwrappedWidth <= wrappedWidth + ? 
new Rectangle(minNegX, maxPosX, maxY, minY) + : new Rectangle(minPosX, maxNegX, maxY, minY); + } } private boolean isValid() { diff --git a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java index fc35df295e566..893a1700616a6 100644 --- a/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java +++ b/libs/geo/src/test/java/org/elasticsearch/geometry/utils/SpatialEnvelopeVisitorTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -36,7 +37,7 @@ public void testVisitCartesianShape() { public void testVisitGeoShapeNoWrap() { for (int i = 0; i < 1000; i++) { var geometry = GeometryTestUtils.randomGeometryWithoutCircle(0, false); - var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, false); + var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.NO_WRAP); assertNotNull(bbox); assertTrue(i + ": " + geometry, bbox.isPresent()); var result = bbox.get(); @@ -48,7 +49,8 @@ public void testVisitGeoShapeNoWrap() { public void testVisitGeoShapeWrap() { for (int i = 0; i < 1000; i++) { var geometry = GeometryTestUtils.randomGeometryWithoutCircle(0, true); - var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, false); + // TODO this should be WRAP instead + var bbox = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.NO_WRAP); assertNotNull(bbox); assertTrue(i + ": " + geometry, bbox.isPresent()); var result = bbox.get(); @@ -81,7 +83,7 @@ public void testVisitCartesianPoints() { } public void testVisitGeoPointsNoWrapping() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(false)); + var 
visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.NO_WRAP)); double minY = Double.MAX_VALUE; double maxY = -Double.MAX_VALUE; double minX = Double.MAX_VALUE; @@ -103,7 +105,7 @@ public void testVisitGeoPointsNoWrapping() { } public void testVisitGeoPointsWrapping() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(true)); + var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP)); double minY = Double.POSITIVE_INFINITY; double maxY = Double.NEGATIVE_INFINITY; double minNegX = Double.POSITIVE_INFINITY; @@ -145,7 +147,7 @@ public void testVisitGeoPointsWrapping() { } public void testWillCrossDateline() { - var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(true)); + var visitor = new SpatialEnvelopeVisitor(new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP)); visitor.visit(new Point(-90.0, 0.0)); visitor.visit(new Point(90.0, 0.0)); assertCrossesDateline(visitor, false); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java new file mode 100644 index 0000000000000..21306036fbf50 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunction.java @@ -0,0 +1,187 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentCartesianPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List channels; + + public SpatialExtentCartesianPointDocValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianPointDocValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialExtentCartesianPointDocValuesAggregatorFunction(driverContext, channels, SpatialExtentCartesianPointDocValuesAggregator.initSingle()); + } + + public static 
List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + 
SpatialExtentCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianPointDocValuesAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..751ea3b4c4a9d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianPointDocValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..a5191e57959b8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,230 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointDocValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction(channels, 
SpatialExtentCartesianPointDocValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int 
positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointDocValuesAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, 
offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..6610168e1df21 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunction.java @@ -0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentCartesianPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List channels; + + public SpatialExtentCartesianPointSourceValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianPointSourceValuesAggregatorFunction create( + DriverContext driverContext, List channels) { + return new SpatialExtentCartesianPointSourceValuesAggregatorFunction(driverContext, channels, 
SpatialExtentCartesianPointSourceValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + 
for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianPointSourceValuesAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointSourceValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + 
sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..7f4d1d69ae928 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..4e06158952fc3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,235 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction(channels, SpatialExtentCartesianPointSourceValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() 
{ + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + 
positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) 
minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianPointSourceValuesAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java new file mode 100644 index 0000000000000..19aa4f7ca78a2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunction.java @@ -0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianShapeAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentState state; + + private final List channels; + + public SpatialExtentCartesianShapeAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentCartesianShapeAggregatorFunction create(DriverContext driverContext, + List channels) { + return new SpatialExtentCartesianShapeAggregatorFunction(driverContext, channels, SpatialExtentCartesianShapeAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + 
SpatialExtentCartesianShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentCartesianShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentCartesianShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + assert minX.getPositionCount() == 1; + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + assert maxX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + 
return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentCartesianShapeAggregator.combineIntermediate(state, minX.getInt(0), maxX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianShapeAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..9e4b292a0ea29 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentCartesianShapeAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentCartesianShapeAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentCartesianShapeAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentCartesianShapeAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentCartesianShapeGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentCartesianShapeGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_cartesian of shapes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..c55c3d9c66946 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeGroupingAggregatorFunction.java @@ -0,0 +1,235 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentCartesianShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentCartesianShapeGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minX", ElementType.INT), + new IntermediateStateDesc("maxX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentCartesianShapeGroupingAggregatorFunction(List channels, + SpatialExtentGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentCartesianShapeGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new SpatialExtentCartesianShapeGroupingAggregatorFunction(channels, SpatialExtentCartesianShapeAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new 
GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = 
values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentCartesianShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minXUncast = page.getBlock(channels.get(0)); + if (minXUncast.areAllValuesNull()) { + return; + } + IntVector minX = ((IntBlock) minXUncast).asVector(); + Block maxXUncast = page.getBlock(channels.get(1)); + if (maxXUncast.areAllValuesNull()) { + return; + } + IntVector maxX = ((IntBlock) maxXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(2)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(3)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minX.getPositionCount() == maxX.getPositionCount() && 
minX.getPositionCount() == maxY.getPositionCount() && minX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentCartesianShapeAggregator.combineIntermediate(state, groupId, minX.getInt(groupPosition + positionOffset), maxX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingState inState = ((SpatialExtentCartesianShapeGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentCartesianShapeAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentCartesianShapeAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java new file mode 100644 index 0000000000000..c883e82d45989 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunction.java @@ -0,0 +1,201 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoPointDocValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List channels; + + public SpatialExtentGeoPointDocValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoPointDocValuesAggregatorFunction create(DriverContext driverContext, + List channels) { + return new SpatialExtentGeoPointDocValuesAggregatorFunction(driverContext, channels, SpatialExtentGeoPointDocValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void 
addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = 
((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointDocValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..f72a4cc648ec8 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoPointDocValuesAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentGeoPointDocValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo_point_doc of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..eee5bc5df41a4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java @@ -0,0 +1,242 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointDocValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoPointDocValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoPointDocValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(channels, SpatialExtentGeoPointDocValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, 
IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = 
values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) 
{ + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), maxNegX.getInt(groupPosition + positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoPointDocValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoPointDocValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointDocValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + 
public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java new file mode 100644 index 0000000000000..cf65fbdde594c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunction.java @@ -0,0 +1,206 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentGeoPointSourceValuesAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List channels; + + public SpatialExtentGeoPointSourceValuesAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoPointSourceValuesAggregatorFunction create( + DriverContext 
driverContext, List channels) { + return new SpatialExtentGeoPointSourceValuesAggregatorFunction(driverContext, channels, SpatialExtentGeoPointSourceValuesAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private 
void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + 
SpatialExtentGeoPointSourceValuesAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointSourceValuesAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..1af20d72d08b0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoPointSourceValuesAggregatorFunction aggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointSourceValuesAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo_point_source of valuess"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bf8ab2554c7b7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction.java @@ -0,0 +1,247 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoPointSourceValuesAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction(List channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction(channels, SpatialExtentGeoPointSourceValuesAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int 
positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int 
g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoPointSourceValuesAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = 
page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoPointSourceValuesAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), maxNegX.getInt(groupPosition + positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoPointSourceValuesGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoPointSourceValuesAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + 
+ @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoPointSourceValuesAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java new file mode 100644 index 0000000000000..abee9a1cee284 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunction.java @@ -0,0 +1,206 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentGeoShapeAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final DriverContext driverContext; + + private final SpatialExtentStateWrappedLongitudeState state; + + private final List channels; + + public SpatialExtentGeoShapeAggregatorFunction(DriverContext driverContext, + List channels, SpatialExtentStateWrappedLongitudeState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SpatialExtentGeoShapeAggregatorFunction create(DriverContext driverContext, + List channels) { + return new 
SpatialExtentGeoShapeAggregatorFunction(driverContext, channels, SpatialExtentGeoShapeAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + SpatialExtentGeoShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialExtentGeoShapeAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int 
p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialExtentGeoShapeAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + assert minNegX.getPositionCount() == 1; + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + assert minPosX.getPositionCount() == 1; + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + assert maxNegX.getPositionCount() == 1; + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + assert maxPosX.getPositionCount() == 1; + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + assert maxY.getPositionCount() == 1; + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minY.getPositionCount() == 1; + SpatialExtentGeoShapeAggregator.combineIntermediate(state, minNegX.getInt(0), minPosX.getInt(0), maxNegX.getInt(0), maxPosX.getInt(0), maxY.getInt(0), minY.getInt(0)); + } + + 
@Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoShapeAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..09f210c7085f8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregatorFunctionSupplier.java @@ -0,0 +1,40 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialExtentGeoShapeAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public SpatialExtentGeoShapeAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public SpatialExtentGeoShapeAggregatorFunction aggregator(DriverContext driverContext) { + return SpatialExtentGeoShapeAggregatorFunction.create(driverContext, channels); + } + + @Override + public SpatialExtentGeoShapeGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return SpatialExtentGeoShapeGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "spatial_extent_geo of shapes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..1200259ea6c41 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeGroupingAggregatorFunction.java @@ -0,0 +1,247 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation.spatial; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.IntermediateStateDesc; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SpatialExtentGeoShapeAggregator}. + * This class is generated. Do not edit it. + */ +public final class SpatialExtentGeoShapeGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("minNegX", ElementType.INT), + new IntermediateStateDesc("minPosX", ElementType.INT), + new IntermediateStateDesc("maxNegX", ElementType.INT), + new IntermediateStateDesc("maxPosX", ElementType.INT), + new IntermediateStateDesc("maxY", ElementType.INT), + new IntermediateStateDesc("minY", ElementType.INT) ); + + private final SpatialExtentGroupingStateWrappedLongitudeState state; + + private final List channels; + + private final DriverContext driverContext; + + public SpatialExtentGeoShapeGroupingAggregatorFunction(List channels, + SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SpatialExtentGeoShapeGroupingAggregatorFunction create(List 
channels, + DriverContext driverContext) { + return new SpatialExtentGeoShapeGroupingAggregatorFunction(channels, SpatialExtentGeoShapeAggregator.initGrouping(), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; 
v++) { + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + SpatialExtentGeoShapeAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + 
+ @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block minNegXUncast = page.getBlock(channels.get(0)); + if (minNegXUncast.areAllValuesNull()) { + return; + } + IntVector minNegX = ((IntBlock) minNegXUncast).asVector(); + Block minPosXUncast = page.getBlock(channels.get(1)); + if (minPosXUncast.areAllValuesNull()) { + return; + } + IntVector minPosX = ((IntBlock) minPosXUncast).asVector(); + Block maxNegXUncast = page.getBlock(channels.get(2)); + if (maxNegXUncast.areAllValuesNull()) { + return; + } + IntVector maxNegX = ((IntBlock) maxNegXUncast).asVector(); + Block maxPosXUncast = page.getBlock(channels.get(3)); + if (maxPosXUncast.areAllValuesNull()) { + return; + } + IntVector maxPosX = ((IntBlock) maxPosXUncast).asVector(); + Block maxYUncast = page.getBlock(channels.get(4)); + if (maxYUncast.areAllValuesNull()) { + return; + } + IntVector maxY = ((IntBlock) maxYUncast).asVector(); + Block minYUncast = page.getBlock(channels.get(5)); + if (minYUncast.areAllValuesNull()) { + return; + } + IntVector minY = ((IntBlock) minYUncast).asVector(); + assert minNegX.getPositionCount() == minPosX.getPositionCount() && minNegX.getPositionCount() == maxNegX.getPositionCount() && minNegX.getPositionCount() == maxPosX.getPositionCount() && minNegX.getPositionCount() == maxY.getPositionCount() && minNegX.getPositionCount() == minY.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + SpatialExtentGeoShapeAggregator.combineIntermediate(state, groupId, minNegX.getInt(groupPosition + positionOffset), minPosX.getInt(groupPosition + positionOffset), maxNegX.getInt(groupPosition + 
positionOffset), maxPosX.getInt(groupPosition + positionOffset), maxY.getInt(groupPosition + positionOffset), minY.getInt(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SpatialExtentGroupingStateWrappedLongitudeState inState = ((SpatialExtentGeoShapeGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SpatialExtentGeoShapeAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SpatialExtentGeoShapeAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java index 45a45f4337beb..5fa1394e8cf96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AbstractArrayState.java @@ -28,7 +28,7 @@ public AbstractArrayState(BigArrays bigArrays) { this.bigArrays = bigArrays; } - boolean hasValue(int groupId) { + public boolean 
hasValue(int groupId) { return seen == null || seen.get(groupId); } @@ -37,7 +37,7 @@ boolean hasValue(int groupId) { * idempotent and fast if already tracking so it's safe to, say, call it once * for every block of values that arrives containing {@code null}. */ - final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + public final void enableGroupIdTracking(SeenGroupIds seenGroupIds) { if (seen == null) { seen = seenGroupIds.seenGroupIds(bigArrays); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java index c66c960dd8a99..47d927fda91b5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/CentroidPointAggregator.java @@ -32,6 +32,13 @@ * This requires that the planner has planned that points are loaded from the index as doc-values. 
*/ abstract class CentroidPointAggregator { + public static CentroidState initSingle() { + return new CentroidState(); + } + + public static GroupingCentroidState initGrouping(BigArrays bigArrays) { + return new GroupingCentroidState(bigArrays); + } public static void combine(CentroidState current, double xVal, double xDel, double yVal, double yDel, long count) { current.add(xVal, xDel, yVal, yDel, count); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java new file mode 100644 index 0000000000000..6bdd028f3d6ee --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/GeoPointEnvelopeVisitor.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; + +class GeoPointEnvelopeVisitor extends SpatialEnvelopeVisitor.GeoPointVisitor { + GeoPointEnvelopeVisitor() { + super(WrapLongitude.WRAP); + } + + void reset() { + minY = Double.POSITIVE_INFINITY; + maxY = Double.NEGATIVE_INFINITY; + minNegX = Double.POSITIVE_INFINITY; + maxNegX = Double.NEGATIVE_INFINITY; + minPosX = Double.POSITIVE_INFINITY; + maxPosX = Double.NEGATIVE_INFINITY; + } + + double getMinNegX() { + return minNegX; + } + + double getMinPosX() { + return minPosX; + } + + double getMaxNegX() { + return maxNegX; + } + + double getMaxPosX() { + return maxPosX; + } + + double getMaxY() { + return maxY; + } + + double getMinY() { + return minY; + } + + static Rectangle asRectangle( + double minNegX, + double minPosX, + double maxNegX, + double maxPosX, + double maxY, + double minY, + WrapLongitude wrapLongitude + ) { + return SpatialEnvelopeVisitor.GeoPointVisitor.getResult(minNegX, minPosX, maxNegX, maxPosX, maxY, minY, wrapLongitude); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java new file mode 100644 index 0000000000000..5395ca0b85163 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/PointType.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.geo.XYEncodingUtils; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; + +import java.util.Optional; + +public enum PointType { + GEO { + @Override + public Optional computeEnvelope(Geometry geo) { + return SpatialEnvelopeVisitor.visitGeo(geo, WrapLongitude.WRAP); + } + + @Override + public double decodeX(int encoded) { + return GeoEncodingUtils.decodeLongitude(encoded); + } + + @Override + public double decodeY(int encoded) { + return GeoEncodingUtils.decodeLatitude(encoded); + } + + @Override + public int encodeX(double decoded) { + return GeoEncodingUtils.encodeLongitude(decoded); + } + + @Override + public int encodeY(double decoded) { + return GeoEncodingUtils.encodeLatitude(decoded); + } + + // Geo encodes the longitude in the lower 32 bits and the latitude in the upper 32 bits. 
+ @Override + public int extractX(long encoded) { + return SpatialAggregationUtils.extractSecond(encoded); + } + + @Override + public int extractY(long encoded) { + return SpatialAggregationUtils.extractFirst(encoded); + } + }, + CARTESIAN { + @Override + public Optional computeEnvelope(Geometry geo) { + return SpatialEnvelopeVisitor.visitCartesian(geo); + } + + @Override + public double decodeX(int encoded) { + return XYEncodingUtils.decode(encoded); + } + + @Override + public double decodeY(int encoded) { + return XYEncodingUtils.decode(encoded); + } + + @Override + public int encodeX(double decoded) { + return XYEncodingUtils.encode((float) decoded); + } + + @Override + public int encodeY(double decoded) { + return XYEncodingUtils.encode((float) decoded); + } + + @Override + public int extractX(long encoded) { + return SpatialAggregationUtils.extractFirst(encoded); + } + + @Override + public int extractY(long encoded) { + return SpatialAggregationUtils.extractSecond(encoded); + } + }; + + public abstract Optional computeEnvelope(Geometry geo); + + public abstract double decodeX(int encoded); + + public abstract double decodeY(int encoded); + + public abstract int encodeX(double decoded); + + public abstract int encodeY(double decoded); + + public abstract int extractX(long encoded); + + public abstract int extractY(long encoded); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java new file mode 100644 index 0000000000000..6b29b20601dae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialAggregationUtils.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.geo.XYEncodingUtils; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +class SpatialAggregationUtils { + private SpatialAggregationUtils() { /* Utility class */ } + + public static Geometry decode(BytesRef wkb) { + return WellKnownBinary.fromWKB(GeometryValidator.NOOP, false /* coerce */, wkb.bytes, wkb.offset, wkb.length); + } + + public static Point decodePoint(BytesRef wkb) { + return (Point) decode(wkb); + } + + public static double decodeX(long encoded) { + return XYEncodingUtils.decode(extractFirst(encoded)); + } + + public static int extractFirst(long encoded) { + return (int) (encoded >>> 32); + } + + public static double decodeY(long encoded) { + return XYEncodingUtils.decode(extractSecond(encoded)); + } + + public static int extractSecond(long encoded) { + return (int) (encoded & 0xFFFFFFFFL); + } + + public static double decodeLongitude(long encoded) { + return GeoEncodingUtils.decodeLongitude((int) (encoded & 0xFFFFFFFFL)); + } + + public static double decodeLatitude(long encoded) { + return GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)); + } + + public static int encodeNegativeLongitude(double d) { + return Double.isFinite(d) ? GeoEncodingUtils.encodeLongitude(d) : DEFAULT_NEG; + } + + public static int encodePositiveLongitude(double d) { + return Double.isFinite(d) ? 
GeoEncodingUtils.encodeLongitude(d) : DEFAULT_POS; + } + + public static Rectangle asRectangle(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + assert minNegX <= 0 == maxNegX <= 0; + assert minPosX >= 0 == maxPosX >= 0; + return GeoPointEnvelopeVisitor.asRectangle( + minNegX <= 0 ? decodeLongitude(minNegX) : Double.POSITIVE_INFINITY, + minPosX >= 0 ? decodeLongitude(minPosX) : Double.POSITIVE_INFINITY, + maxNegX <= 0 ? decodeLongitude(maxNegX) : Double.NEGATIVE_INFINITY, + maxPosX >= 0 ? decodeLongitude(maxPosX) : Double.NEGATIVE_INFINITY, + GeoEncodingUtils.decodeLatitude(maxY), + GeoEncodingUtils.decodeLatitude(minY), + WrapLongitude.WRAP + ); + } + + public static int maxNeg(int a, int b) { + return a <= 0 && b <= 0 ? Math.max(a, b) : Math.min(a, b); + } + + public static int minPos(int a, int b) { + return a >= 0 && b >= 0 ? Math.min(a, b) : Math.max(a, b); + } + + // The default values are intentionally non-negative/non-positive, so we can mark unassigned values. 
+ public static final int DEFAULT_POS = -1; + public static final int DEFAULT_NEG = 1; +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java index 0bafb6f8112de..891c22b71c7e9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregator.java @@ -7,12 +7,13 @@ package org.elasticsearch.compute.aggregation.spatial; -import org.apache.lucene.geo.XYEncodingUtils; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeX; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeY; + /** * This aggregator calculates the centroid of a set of cartesian points. * It is assumes that the cartesian points are encoded as longs. 
@@ -28,15 +29,6 @@ ) @GroupingAggregator class SpatialCentroidCartesianPointDocValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, long v) { current.add(decodeX(v), decodeY(v)); } @@ -44,12 +36,4 @@ public static void combine(CentroidState current, long v) { public static void combine(GroupingCentroidState current, int groupId, long encoded) { current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); } - - private static double decodeX(long encoded) { - return XYEncodingUtils.decode((int) (encoded >>> 32)); - } - - private static double decodeY(long encoded) { - return XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFFL)); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java index 5673892be4bf0..700721e3ea9d4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregator.java @@ -8,13 +8,10 @@ package org.elasticsearch.compute.aggregation.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.utils.GeometryValidator; -import 
org.elasticsearch.geometry.utils.WellKnownBinary; /** * This aggregator calculates the centroid of a set of cartesian points. @@ -33,26 +30,13 @@ ) @GroupingAggregator class SpatialCentroidCartesianPointSourceValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), point.getY()); } public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); } - - private static Point decode(BytesRef wkb) { - return (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java index ee5ab0e292547..431e25a03779e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregator.java @@ -7,12 +7,13 @@ package org.elasticsearch.compute.aggregation.spatial; -import org.apache.lucene.geo.GeoEncodingUtils; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import 
org.elasticsearch.compute.ann.IntermediateState; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeLatitude; +import static org.elasticsearch.compute.aggregation.spatial.SpatialAggregationUtils.decodeLongitude; + /** * This aggregator calculates the centroid of a set of geo points. It is assumes that the geo points are encoded as longs. * This requires that the planner has planned that points are loaded from the index as doc-values. @@ -27,28 +28,11 @@ ) @GroupingAggregator class SpatialCentroidGeoPointDocValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, long v) { - current.add(decodeX(v), decodeY(v)); + current.add(decodeLongitude(v), decodeLatitude(v)); } public static void combine(GroupingCentroidState current, int groupId, long encoded) { - current.add(decodeX(encoded), 0d, decodeY(encoded), 0d, 1, groupId); - } - - private static double decodeX(long encoded) { - return GeoEncodingUtils.decodeLongitude((int) (encoded & 0xFFFFFFFFL)); - } - - private static double decodeY(long encoded) { - return GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)); + current.add(decodeLongitude(encoded), 0d, decodeLatitude(encoded), 0d, 1, groupId); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java index caf55dcc2f4e1..90563b33b8abb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregator.java @@ -8,13 +8,10 @@ package org.elasticsearch.compute.aggregation.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.utils.GeometryValidator; -import org.elasticsearch.geometry.utils.WellKnownBinary; /** * This aggregator calculates the centroid of a set of geo points. @@ -33,26 +30,13 @@ ) @GroupingAggregator class SpatialCentroidGeoPointSourceValuesAggregator extends CentroidPointAggregator { - - public static CentroidState initSingle() { - return new CentroidState(); - } - - public static GroupingCentroidState initGrouping(BigArrays bigArrays) { - return new GroupingCentroidState(bigArrays); - } - public static void combine(CentroidState current, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), point.getY()); } public static void combine(GroupingCentroidState current, int groupId, BytesRef wkb) { - Point point = decode(wkb); + Point point = SpatialAggregationUtils.decodePoint(wkb); current.add(point.getX(), 0d, point.getY(), 0d, 1, groupId); } - - private static Point decode(BytesRef wkb) { - return (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java new file mode 100644 index 0000000000000..91e0f098d795e --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentAggregator.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +// A bit of abuse of notation here, since we're extending this class to "inherit" its static methods. +// Unfortunately, this is the way it has to be done, since the generated code invokes these methods statically. +abstract class SpatialExtentAggregator { + public static void combineIntermediate(SpatialExtentState current, int minX, int maxX, int maxY, int minY) { + current.add(minX, maxX, maxY, minY); + } + + public static void combineIntermediate(SpatialExtentGroupingState current, int groupId, int minX, int maxX, int maxY, int minY) { + current.add(groupId, minX, maxX, maxY, minY); + } + + public static Block evaluateFinal(SpatialExtentState state, DriverContext driverContext) { + return state.toBlock(driverContext); + } + + public static Block evaluateFinal(SpatialExtentGroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(selected, driverContext); + } + + public static void combineStates(SpatialExtentGroupingState current, int groupId, SpatialExtentGroupingState inState, int inPosition) { + current.add(groupId, inState, inPosition); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java new file mode 100644 index 0000000000000..f64949b77707c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointDocValuesAggregator.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian points. It is assumed the points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. 
+ */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianPointDocValuesAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, long v) { + current.add(v); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, long v) { + current.add(groupId, v); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..3488af4525dcb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianPointSourceValuesAggregator.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian points. It is assumed that the cartesian points are encoded as WKB BytesRef. 
+ * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. + * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialExtentCartesianPointDocValuesAggregator}. + */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianPointSourceValuesAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java new file mode 100644 index 0000000000000..6d50d27aa5a2d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentCartesianShapeAggregator.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of cartesian shapes. It is assumed that the cartesian shapes are encoded as WKB BytesRef. + * We do not currently support reading shape values or extents from doc values. + */ +@Aggregator( + { + @IntermediateState(name = "minX", type = "INT"), + @IntermediateState(name = "maxX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentCartesianShapeAggregator extends SpatialExtentAggregator { + public static SpatialExtentState initSingle() { + return new SpatialExtentState(PointType.CARTESIAN); + } + + public static SpatialExtentGroupingState initGrouping() { + return new SpatialExtentGroupingState(PointType.CARTESIAN); + } + + public static void combine(SpatialExtentState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java new file mode 100644 index 0000000000000..b9b8bf65e116b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesAggregator.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo points. It is assumed the points are encoded as longs. + * This requires that the planner has planned that points are loaded from the index as doc-values. + */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoPointDocValuesAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. 
+ public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, long encoded) { + current.add(encoded); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, long encoded) { + current.add(groupId, encoded); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java new file mode 100644 index 0000000000000..36a4e359f23fc --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointSourceValuesAggregator.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo points. It is assumed that the geo points are encoded as WKB BytesRef. + * This requires that the planner has NOT planned that points are loaded from the index as doc-values, but from source instead. 
+ * This is also used for final aggregations and aggregations in the coordinator node, + * even if the local node partial aggregation is done with {@link SpatialExtentGeoPointDocValuesAggregator}. + */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoPointSourceValuesAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. + public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java new file mode 100644 index 0000000000000..3d1b9b6300c9d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoShapeAggregator.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; + +/** + * Computes the extent of a set of geo shapes. It is assumed that the geo shapes are encoded as WKB BytesRef. + * We do not currently support reading shape values or extents from doc values. + */ +@Aggregator( + { + @IntermediateState(name = "minNegX", type = "INT"), + @IntermediateState(name = "minPosX", type = "INT"), + @IntermediateState(name = "maxNegX", type = "INT"), + @IntermediateState(name = "maxPosX", type = "INT"), + @IntermediateState(name = "maxY", type = "INT"), + @IntermediateState(name = "minY", type = "INT") } +) +@GroupingAggregator +class SpatialExtentGeoShapeAggregator extends SpatialExtentLongitudeWrappingAggregator { + // TODO support non-longitude wrapped geo shapes. 
+ public static SpatialExtentStateWrappedLongitudeState initSingle() { + return new SpatialExtentStateWrappedLongitudeState(); + } + + public static SpatialExtentGroupingStateWrappedLongitudeState initGrouping() { + return new SpatialExtentGroupingStateWrappedLongitudeState(); + } + + public static void combine(SpatialExtentStateWrappedLongitudeState current, BytesRef bytes) { + current.add(SpatialAggregationUtils.decode(bytes)); + } + + public static void combine(SpatialExtentGroupingStateWrappedLongitudeState current, int groupId, BytesRef bytes) { + current.add(groupId, SpatialAggregationUtils.decode(bytes)); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java new file mode 100644 index 0000000000000..9ce0ccdda0ff5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingState.java @@ -0,0 +1,154 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.compute.aggregation.AbstractArrayState; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentGroupingState extends AbstractArrayState { + private final PointType pointType; + private IntArray minXs; + private IntArray maxXs; + private IntArray maxYs; + private IntArray minYs; + + SpatialExtentGroupingState(PointType pointType) { + this(pointType, BigArrays.NON_RECYCLING_INSTANCE); + } + + SpatialExtentGroupingState(PointType pointType, BigArrays bigArrays) { + super(bigArrays); + this.pointType = pointType; + this.minXs = bigArrays.newIntArray(0, false); + this.maxXs = bigArrays.newIntArray(0, false); + this.maxYs = bigArrays.newIntArray(0, false); + this.minYs = bigArrays.newIntArray(0, false); + enableGroupIdTracking(new SeenGroupIds.Empty()); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset; + try ( + var minXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int group = 
selected.getInt(i); + assert hasValue(group); + minXsBuilder.appendInt(minXs.get(group)); + maxXsBuilder.appendInt(maxXs.get(group)); + maxYsBuilder.appendInt(maxYs.get(group)); + minYsBuilder.appendInt(minYs.get(group)); + } + blocks[offset + 0] = minXsBuilder.build(); + blocks[offset + 1] = maxXsBuilder.build(); + blocks[offset + 2] = maxYsBuilder.build(); + blocks[offset + 3] = minYsBuilder.build(); + } + } + + public void add(int groupId, Geometry geometry) { + ensureCapacity(groupId); + pointType.computeEnvelope(geometry) + .ifPresent( + r -> add( + groupId, + pointType.encodeX(r.getMinX()), + pointType.encodeX(r.getMaxX()), + pointType.encodeY(r.getMaxY()), + pointType.encodeY(r.getMinY()) + ) + ); + } + + public void add(int groupId, long encoded) { + int x = pointType.extractX(encoded); + int y = pointType.extractY(encoded); + add(groupId, x, x, y, y); + } + + public void add(int groupId, int minX, int maxX, int maxY, int minY) { + ensureCapacity(groupId); + if (hasValue(groupId)) { + minXs.set(groupId, Math.min(minXs.get(groupId), minX)); + maxXs.set(groupId, Math.max(maxXs.get(groupId), maxX)); + maxYs.set(groupId, Math.max(maxYs.get(groupId), maxY)); + minYs.set(groupId, Math.min(minYs.get(groupId), minY)); + } else { + minXs.set(groupId, minX); + maxXs.set(groupId, maxX); + maxYs.set(groupId, maxY); + minYs.set(groupId, minY); + } + trackGroupId(groupId); + } + + private void ensureCapacity(int groupId) { + long requiredSize = groupId + 1; + if (minXs.size() < requiredSize) { + assert minXs.size() == maxXs.size() && minXs.size() == maxYs.size() && minXs.size() == minYs.size(); + minXs = bigArrays.grow(minXs, requiredSize); + maxXs = bigArrays.grow(maxXs, requiredSize); + maxYs = bigArrays.grow(maxYs, requiredSize); + minYs = bigArrays.grow(minYs, requiredSize); + } + } + + public Block toBlock(IntVector selected, DriverContext driverContext) { + try (var builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for 
(int i = 0; i < selected.getPositionCount(); i++) { + int si = selected.getInt(i); + if (hasValue(si)) { + builder.appendBytesRef( + new BytesRef( + WellKnownBinary.toWKB( + new Rectangle( + pointType.decodeX(minXs.get(si)), + pointType.decodeX(maxXs.get(si)), + pointType.decodeY(maxYs.get(si)), + pointType.decodeY(minYs.get(si)) + ), + ByteOrder.LITTLE_ENDIAN + ) + ) + ); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + public void add(int groupId, SpatialExtentGroupingState inState, int inPosition) { + ensureCapacity(groupId); + if (inState.hasValue(inPosition)) { + add( + groupId, + inState.minXs.get(inPosition), + inState.maxXs.get(inPosition), + inState.maxYs.get(inPosition), + inState.minYs.get(inPosition) + ); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java new file mode 100644 index 0000000000000..3dd7a6d4acde2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGroupingStateWrappedLongitudeState.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.compute.aggregation.AbstractArrayState; +import org.elasticsearch.compute.aggregation.GroupingAggregatorState; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentGroupingStateWrappedLongitudeState extends AbstractArrayState implements GroupingAggregatorState { + // Only geo points support longitude wrapping. + private static final PointType POINT_TYPE = PointType.GEO; + private IntArray minNegXs; + private IntArray minPosXs; + private IntArray maxNegXs; + private IntArray maxPosXs; + private IntArray maxYs; + private IntArray minYs; + + private GeoPointEnvelopeVisitor geoPointVisitor = new GeoPointEnvelopeVisitor(); + + SpatialExtentGroupingStateWrappedLongitudeState() { + this(BigArrays.NON_RECYCLING_INSTANCE); + } + + SpatialExtentGroupingStateWrappedLongitudeState(BigArrays bigArrays) { + super(bigArrays); + this.minNegXs = bigArrays.newIntArray(0, false); + this.minPosXs = bigArrays.newIntArray(0, false); + this.maxNegXs = bigArrays.newIntArray(0, false); + this.maxPosXs = bigArrays.newIntArray(0, false); + this.maxYs = bigArrays.newIntArray(0, false); + this.minYs = bigArrays.newIntArray(0, false); + enableGroupIdTracking(new SeenGroupIds.Empty()); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset; + try ( + var minNegXsBuilder = 
driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minPosXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxNegXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxPosXsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var maxYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var minYsBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int group = selected.getInt(i); + assert hasValue(group); + assert minNegXs.get(group) <= 0 == maxNegXs.get(group) <= 0; + assert minPosXs.get(group) >= 0 == maxPosXs.get(group) >= 0; + minNegXsBuilder.appendInt(minNegXs.get(group)); + minPosXsBuilder.appendInt(minPosXs.get(group)); + maxNegXsBuilder.appendInt(maxNegXs.get(group)); + maxPosXsBuilder.appendInt(maxPosXs.get(group)); + maxYsBuilder.appendInt(maxYs.get(group)); + minYsBuilder.appendInt(minYs.get(group)); + } + blocks[offset + 0] = minNegXsBuilder.build(); + blocks[offset + 1] = minPosXsBuilder.build(); + blocks[offset + 2] = maxNegXsBuilder.build(); + blocks[offset + 3] = maxPosXsBuilder.build(); + blocks[offset + 4] = maxYsBuilder.build(); + blocks[offset + 5] = minYsBuilder.build(); + } + } + + public void add(int groupId, Geometry geo) { + ensureCapacity(groupId); + geoPointVisitor.reset(); + if (geo.visit(new SpatialEnvelopeVisitor(geoPointVisitor))) { + add( + groupId, + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMinNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMinPosX()), + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMaxNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMaxPosX()), + POINT_TYPE.encodeY(geoPointVisitor.getMaxY()), + 
POINT_TYPE.encodeY(geoPointVisitor.getMinY()) + ); + } + } + + public void add(int groupId, SpatialExtentGroupingStateWrappedLongitudeState inState, int inPosition) { + ensureCapacity(groupId); + if (inState.hasValue(inPosition)) { + add( + groupId, + inState.minNegXs.get(inPosition), + inState.minPosXs.get(inPosition), + inState.maxNegXs.get(inPosition), + inState.maxPosXs.get(inPosition), + inState.maxYs.get(inPosition), + inState.minYs.get(inPosition) + ); + } + } + + public void add(int groupId, long encoded) { + int x = POINT_TYPE.extractX(encoded); + int y = POINT_TYPE.extractY(encoded); + add(groupId, x, x, x, x, y, y); + } + + public void add(int groupId, int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + ensureCapacity(groupId); + if (hasValue(groupId)) { + minNegXs.set(groupId, Math.min(minNegXs.get(groupId), minNegX)); + minPosXs.set(groupId, SpatialAggregationUtils.minPos(minPosXs.get(groupId), minPosX)); + maxNegXs.set(groupId, SpatialAggregationUtils.maxNeg(maxNegXs.get(groupId), maxNegX)); + maxPosXs.set(groupId, Math.max(maxPosXs.get(groupId), maxPosX)); + maxYs.set(groupId, Math.max(maxYs.get(groupId), maxY)); + minYs.set(groupId, Math.min(minYs.get(groupId), minY)); + } else { + minNegXs.set(groupId, minNegX); + minPosXs.set(groupId, minPosX); + maxNegXs.set(groupId, maxNegX); + maxPosXs.set(groupId, maxPosX); + maxYs.set(groupId, maxY); + minYs.set(groupId, minY); + } + assert minNegX <= 0 == maxNegX <= 0 : "minNegX=" + minNegX + " maxNegX=" + maxNegX; + assert minPosX >= 0 == maxPosX >= 0 : "minPosX=" + minPosX + " maxPosX=" + maxPosX; + trackGroupId(groupId); + } + + private void ensureCapacity(int groupId) { + long requiredSize = groupId + 1; + if (minNegXs.size() < requiredSize) { + minNegXs = bigArrays.grow(minNegXs, requiredSize); + minPosXs = bigArrays.grow(minPosXs, requiredSize); + maxNegXs = bigArrays.grow(maxNegXs, requiredSize); + maxPosXs = bigArrays.grow(maxPosXs, requiredSize); + minYs = 
bigArrays.grow(minYs, requiredSize); + maxYs = bigArrays.grow(maxYs, requiredSize); + } + } + + public Block toBlock(IntVector selected, DriverContext driverContext) { + try (var builder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int si = selected.getInt(i); + if (hasValue(si)) { + builder.appendBytesRef( + new BytesRef( + WellKnownBinary.toWKB( + SpatialAggregationUtils.asRectangle( + minNegXs.get(si), + minPosXs.get(si), + maxNegXs.get(si), + maxPosXs.get(si), + maxYs.get(si), + minYs.get(si) + ), + ByteOrder.LITTLE_ENDIAN + ) + ) + ); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java new file mode 100644 index 0000000000000..80ba2d5e45658 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentLongitudeWrappingAggregator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +// A bit of abuse of notation here, since we're extending this class to "inherit" its static methods. +// Unfortunately, this is the way it has to be done, since the generated code invokes these methods statically. 
+abstract class SpatialExtentLongitudeWrappingAggregator { + public static void combineIntermediate( + SpatialExtentStateWrappedLongitudeState current, + int minNegX, + int minPosX, + int maxNegX, + int maxPosX, + int maxY, + int minY + ) { + current.add(minNegX, minPosX, maxNegX, maxPosX, maxY, minY); + } + + public static void combineIntermediate( + SpatialExtentGroupingStateWrappedLongitudeState current, + int groupId, + int minNegX, + int minPosX, + int maxNegX, + int maxPosX, + int maxY, + int minY + ) { + current.add(groupId, minNegX, minPosX, maxNegX, maxPosX, maxY, minY); + } + + public static Block evaluateFinal(SpatialExtentStateWrappedLongitudeState state, DriverContext driverContext) { + return state.toBlock(driverContext); + } + + public static Block evaluateFinal( + SpatialExtentGroupingStateWrappedLongitudeState state, + IntVector selected, + DriverContext driverContext + ) { + return state.toBlock(selected, driverContext); + } + + public static void combineStates( + SpatialExtentGroupingStateWrappedLongitudeState current, + int groupId, + SpatialExtentGroupingStateWrappedLongitudeState inState, + int inPosition + ) { + current.add(groupId, inState, inPosition); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java new file mode 100644 index 0000000000000..0eea9b79f73ea --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentState.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentState implements AggregatorState { + private final PointType pointType; + private boolean seen = false; + private int minX = Integer.MAX_VALUE; + private int maxX = Integer.MIN_VALUE; + private int maxY = Integer.MIN_VALUE; + private int minY = Integer.MAX_VALUE; + + SpatialExtentState(PointType pointType) { + this.pointType = pointType; + } + + @Override + public void close() {} + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + var blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantIntBlockWith(minX, 1); + blocks[offset + 1] = blockFactory.newConstantIntBlockWith(maxX, 1); + blocks[offset + 2] = blockFactory.newConstantIntBlockWith(maxY, 1); + blocks[offset + 3] = blockFactory.newConstantIntBlockWith(minY, 1); + } + + public void add(Geometry geo) { + pointType.computeEnvelope(geo) + .ifPresent( + r -> add( + pointType.encodeX(r.getMinX()), + pointType.encodeX(r.getMaxX()), + pointType.encodeY(r.getMaxY()), + pointType.encodeY(r.getMinY()) + ) + ); + } + + public void add(int minX, int maxX, int maxY, int minY) { + seen = true; + this.minX = Math.min(this.minX, minX); + this.maxX = Math.max(this.maxX, maxX); + this.maxY = Math.max(this.maxY, maxY); + this.minY = Math.min(this.minY, minY); + } + + public void add(long encoded) { + int x = pointType.extractX(encoded); + int y = pointType.extractY(encoded); + add(x, x, y, y); + } + + public Block 
toBlock(DriverContext driverContext) { + var factory = driverContext.blockFactory(); + return seen ? factory.newConstantBytesRefBlockWith(new BytesRef(toWKB()), 1) : factory.newConstantNullBlock(1); + } + + private byte[] toWKB() { + return WellKnownBinary.toWKB( + new Rectangle(pointType.decodeX(minX), pointType.decodeX(maxX), pointType.decodeY(maxY), pointType.decodeY(minY)), + ByteOrder.LITTLE_ENDIAN + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java new file mode 100644 index 0000000000000..99200d2ed99f5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/spatial/SpatialExtentStateWrappedLongitudeState.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.spatial; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.AggregatorState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.WellKnownBinary; + +import java.nio.ByteOrder; + +final class SpatialExtentStateWrappedLongitudeState implements AggregatorState { + // Only geo points support longitude wrapping. 
+ private static final PointType POINT_TYPE = PointType.GEO; + private boolean seen = false; + private int minNegX = SpatialAggregationUtils.DEFAULT_NEG; + private int minPosX = SpatialAggregationUtils.DEFAULT_POS; + private int maxNegX = SpatialAggregationUtils.DEFAULT_NEG; + private int maxPosX = SpatialAggregationUtils.DEFAULT_POS; + private int maxY = Integer.MIN_VALUE; + private int minY = Integer.MAX_VALUE; + + private GeoPointEnvelopeVisitor geoPointVisitor = new GeoPointEnvelopeVisitor(); + + @Override + public void close() {} + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 6; + var blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantIntBlockWith(minNegX, 1); + blocks[offset + 1] = blockFactory.newConstantIntBlockWith(minPosX, 1); + blocks[offset + 2] = blockFactory.newConstantIntBlockWith(maxNegX, 1); + blocks[offset + 3] = blockFactory.newConstantIntBlockWith(maxPosX, 1); + blocks[offset + 4] = blockFactory.newConstantIntBlockWith(maxY, 1); + blocks[offset + 5] = blockFactory.newConstantIntBlockWith(minY, 1); + } + + public void add(Geometry geo) { + geoPointVisitor.reset(); + if (geo.visit(new SpatialEnvelopeVisitor(geoPointVisitor))) { + add( + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMinNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMinPosX()), + SpatialAggregationUtils.encodeNegativeLongitude(geoPointVisitor.getMaxNegX()), + SpatialAggregationUtils.encodePositiveLongitude(geoPointVisitor.getMaxPosX()), + POINT_TYPE.encodeY(geoPointVisitor.getMaxY()), + POINT_TYPE.encodeY(geoPointVisitor.getMinY()) + ); + } + } + + public void add(int minNegX, int minPosX, int maxNegX, int maxPosX, int maxY, int minY) { + seen = true; + this.minNegX = Math.min(this.minNegX, minNegX); + this.minPosX = SpatialAggregationUtils.minPos(this.minPosX, minPosX); + this.maxNegX = 
SpatialAggregationUtils.maxNeg(this.maxNegX, maxNegX); + this.maxPosX = Math.max(this.maxPosX, maxPosX); + this.maxY = Math.max(this.maxY, maxY); + this.minY = Math.min(this.minY, minY); + assert this.minNegX <= 0 == this.maxNegX <= 0 : "minNegX=" + this.minNegX + " maxNegX=" + this.maxNegX; + assert this.minPosX >= 0 == this.maxPosX >= 0 : "minPosX=" + this.minPosX + " maxPosX=" + this.maxPosX; + } + + public void add(long encoded) { + int x = POINT_TYPE.extractX(encoded); + int y = POINT_TYPE.extractY(encoded); + add(x, x, x, x, y, y); + } + + public Block toBlock(DriverContext driverContext) { + var factory = driverContext.blockFactory(); + return seen ? factory.newConstantBytesRefBlockWith(new BytesRef(toWKB()), 1) : factory.newConstantNullBlock(1); + } + + private byte[] toWKB() { + return WellKnownBinary.toWKB( + SpatialAggregationUtils.asRectangle(minNegX, minPosX, maxNegX, maxPosX, maxY, minY), + ByteOrder.LITTLE_ENDIAN + ); + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json index d7097f89a17df..782fd40712f43 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_no_doc_values.json @@ -24,7 +24,9 @@ "type": "keyword" }, "city_location": { - "type": "geo_point" + "type": "geo_point", + "index": true, + "doc_values": false } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index ac9948c90f5e9..8694c973448e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -519,6 +519,63 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; 
+############################################### +# Tests for ST_EXTENT_AGG on GEO_POINT type + +stExtentSingleGeoPoint +required_capability: st_extent_agg +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| STATS extent = ST_EXTENT_AGG(point) +; + +extent:geo_shape +BBOX(42.97109629958868, 42.97109629958868, 14.7552534006536, 14.7552534006536) +; + +stExtentMultipleGeoPoints +required_capability: st_extent_agg +// tag::st_extent_agg-airports[] +FROM airports +| WHERE country == "India" +| STATS extent = ST_EXTENT_AGG(location) +// end::st_extent_agg-airports[] +; + +// tag::st_extent_agg-airports-result[] +extent:geo_shape +BBOX (70.77995480038226, 91.5882289968431, 33.9830909203738, 8.47650992218405) +// end::st_extent_agg-airports-result[] +; + +stExtentMultipleGeoPointsNoDocValues +required_capability: st_extent_agg +FROM airports_no_doc_values | WHERE country == "India" | STATS extent = ST_EXTENT_AGG(location) +; + +extent:geo_shape +BBOX (70.77995480038226, 91.5882289968431, 33.9830909203738, 8.47650992218405) +; + +stExtentMultipleGeoPointGrouping +required_capability: st_extent_agg +FROM airports | STATS extent = ST_EXTENT_AGG(location) BY country | SORT country | LIMIT 3 +; + +extent:geo_shape | country:keyword +BBOX (69.2100736219436, 69.2100736219436, 34.56339786294848, 34.56339786294848) | Afghanistan +BBOX (19.715032372623682, 19.715032372623682, 41.4208514476195, 41.4208514476195) | Albania +BBOX (-0.6067969836294651, 6.621946580708027, 36.69972063973546, 35.62027471605688) | Algeria +; + +stExtentGeoShapes +required_capability: st_extent_agg +FROM airport_city_boundaries | WHERE region == "City of New York" | STATS extent = ST_EXTENT_AGG(city_boundary) +; + +extent:geo_shape +BBOX (-74.25880000926554, -73.70020005851984, 40.91759996954352, 40.47659996431321) +; + ############################################### # Tests for ST_INTERSECTS on GEO_POINT type @@ -1698,6 +1755,48 @@ centroid:cartesian_point | count:long POINT 
(726480.0130685265 3359566.331716279) | 849 ; +############################################### +# Tests for ST_EXTENT_AGG on CARTESIAN_POINT type + +stExtentSingleCartesianPoint +required_capability: st_extent_agg +ROW point = TO_CARTESIANPOINT("POINT(429.7109629958868 147.552534006536)") +| STATS extent = ST_EXTENT_AGG(point) +; + +extent:cartesian_shape +BBOX (429.7109680175781, 429.7109680175781, 147.5525360107422, 147.5525360107422) +; + +stExtentMultipleCartesianPoints +required_capability: st_extent_agg +FROM airports_web | WHERE scalerank == 9 | STATS extent = ST_EXTENT_AGG(location) +; + +extent:cartesian_shape +BBOX (4783520.5, 1.6168486E7, 8704352.0, -584415.9375) +; + +stExtentMultipleCartesianPointGrouping +required_capability: st_extent_agg +FROM airports_web | STATS extent = ST_EXTENT_AGG(location) BY scalerank | SORT scalerank DESC | LIMIT 3 +; + +extent:cartesian_shape | scalerank:integer +BBOX (4783520.5, 1.6168486E7, 8704352.0, -584415.9375) | 9 +BBOX (-1.936604E7, 1.8695374E7, 1.4502138E7, -3943067.25) | 8 +BBOX (-1.891609E7, 1.9947946E7, 8455470.0, -7128878.5) | 7 +; + +stExtentCartesianShapes +required_capability: st_extent_agg +FROM cartesian_multipolygons | STATS extent = ST_EXTENT_AGG(shape) +; + +extent:cartesian_shape +BBOX (0.0, 3.0, 3.0, 0.0) +; + ############################################### # Tests for ST_INTERSECTS on CARTESIAN_POINT type diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index ddabb3e937dc2..4cf3162fcca3b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -189,6 +189,9 @@ public enum Cap { */ ST_DISTANCE, + /** Support for function {@code ST_EXTENT_AGG}.
*/ + ST_EXTENT_AGG, + /** * Fix determination of CRS types in spatial functions when folding. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3749b46879354..50d0d2438d8a1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; @@ -353,6 +354,7 @@ private static FunctionDefinition[][] functions() { new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid_agg"), def(SpatialContains.class, SpatialContains::new, "st_contains"), + def(SpatialExtent.class, SpatialExtent::new, "st_extent_agg"), def(SpatialDisjoint.class, SpatialDisjoint::new, "st_disjoint"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), def(SpatialWithin.class, SpatialWithin::new, "st_within"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java index d74b5c8b386b8..db1d2a9e6f254 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -25,6 +25,7 @@ public static List getNamedWriteables() { Percentile.ENTRY, Rate.ENTRY, SpatialCentroid.ENTRY, + SpatialExtent.ENTRY, StdDev.ENTRY, Sum.ENTRY, Top.ENTRY, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java index 87eec540932b1..35f99e4b648df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialAggregateFunction.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -22,26 +25,34 @@ * select the best one. 
*/ public abstract class SpatialAggregateFunction extends AggregateFunction { - protected final boolean useDocValues; + protected final FieldExtractPreference fieldExtractPreference; - protected SpatialAggregateFunction(Source source, Expression field, Expression filter, boolean useDocValues) { + protected SpatialAggregateFunction(Source source, Expression field, Expression filter, FieldExtractPreference fieldExtractPreference) { super(source, field, filter, emptyList()); - this.useDocValues = useDocValues; + this.fieldExtractPreference = fieldExtractPreference; } - protected SpatialAggregateFunction(StreamInput in, boolean useDocValues) throws IOException { + protected SpatialAggregateFunction(StreamInput in, FieldExtractPreference fieldExtractPreference) throws IOException { super(in); - // The useDocValues field is only used on data nodes local planning, and therefor never serialized - this.useDocValues = useDocValues; + // The fieldExtractPreference field is only used on data nodes local planning, and therefore never serialized + this.fieldExtractPreference = fieldExtractPreference; } public abstract SpatialAggregateFunction withDocValues(); + @Override + public boolean checkLicense(XPackLicenseState state) { + return switch (field().dataType()) { + case GEO_SHAPE, CARTESIAN_SHAPE -> state.isAllowedByLicense(License.OperationMode.PLATINUM); + default -> true; + }; + } + @Override public int hashCode() { // NB: the hashcode is currently used for key generation so // to avoid clashes between aggs with the same arguments, add the class name as variation - return Objects.hash(getClass(), children(), useDocValues); + return Objects.hash(getClass(), children(), fieldExtractPreference); } @Override @@ -50,12 +61,12 @@ public boolean equals(Object obj) { SpatialAggregateFunction other = (SpatialAggregateFunction) obj; return Objects.equals(other.field(), field()) && Objects.equals(other.parameters(), parameters()) - && Objects.equals(other.useDocValues, useDocValues); 
+ && Objects.equals(other.fieldExtractPreference, fieldExtractPreference); } return false; } - public boolean useDocValues() { - return useDocValues; + public FieldExtractPreference fieldExtractPreference() { + return fieldExtractPreference; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java index aad95c07e3492..84915d024ea82 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroid.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.spatial.SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -27,6 +28,7 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatialPoint; @@ -47,15 +49,15 @@ public class SpatialCentroid extends SpatialAggregateFunction implements ToAggre examples = @Example(file = "spatial", tag = "st_centroid_agg-airports") ) public SpatialCentroid(Source source, @Param(name = "field", type = { 
"geo_point", "cartesian_point" }) Expression field) { - this(source, field, Literal.TRUE, false); + this(source, field, Literal.TRUE, NONE); } - private SpatialCentroid(Source source, Expression field, Expression filter, boolean useDocValues) { - super(source, field, filter, useDocValues); + private SpatialCentroid(Source source, Expression field, Expression filter, FieldExtractPreference preference) { + super(source, field, filter, preference); } private SpatialCentroid(StreamInput in) throws IOException { - super(in, false); + super(in, NONE); } @Override @@ -65,12 +67,12 @@ public String getWriteableName() { @Override public SpatialCentroid withFilter(Expression filter) { - return new SpatialCentroid(source(), field(), filter, useDocValues); + return new SpatialCentroid(source(), field(), filter, fieldExtractPreference); } @Override public SpatialCentroid withDocValues() { - return new SpatialCentroid(source(), field(), filter(), true); + return new SpatialCentroid(source(), field(), filter(), FieldExtractPreference.DOC_VALUES); } @Override @@ -98,23 +100,16 @@ public SpatialCentroid replaceChildren(List newChildren) { @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); - if (useDocValues) { - // When the points are read as doc-values (eg. 
from the index), feed them into the doc-values aggregator - if (type == DataType.GEO_POINT) { - return new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.CARTESIAN_POINT) { - return new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); - } - } else { - // When the points are read as WKB from source or as point literals, feed them into the source-values aggregator - if (type == DataType.GEO_POINT) { - return new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); - } - if (type == DataType.CARTESIAN_POINT) { - return new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); - } - } - throw EsqlIllegalArgumentException.illegalDataType(type); + return switch (type) { + case DataType.GEO_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialCentroidGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialCentroidGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialCentroidCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialCentroidCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + default -> throw EsqlIllegalArgumentException.illegalDataType(type); + }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java new file mode 100644 index 0000000000000..5cc1701faf13a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtent.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentCartesianShapeAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.spatial.SpatialExtentGeoShapeAggregatorFunctionSupplier; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import 
static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; + +/** + * Calculate spatial extent of all values of a field in matching documents. + */ +public final class SpatialExtent extends SpatialAggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "SpatialExtent", + SpatialExtent::new + ); + + @FunctionInfo( + returnType = { "geo_shape", "cartesian_shape" }, + description = "Calculate the spatial extent over a field with geometry type. Returns a bounding box for all values of the field.", + isAggregation = true, + examples = @Example(file = "spatial", tag = "st_extent_agg-airports") + ) + public SpatialExtent( + Source source, + @Param(name = "field", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }) Expression field + ) { + this(source, field, Literal.TRUE, FieldExtractPreference.NONE); + } + + private SpatialExtent(Source source, Expression field, Expression filter, FieldExtractPreference preference) { + super(source, field, filter, preference); + } + + private SpatialExtent(StreamInput in) throws IOException { + super(in, FieldExtractPreference.NONE); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public SpatialExtent withFilter(Expression filter) { + return new SpatialExtent(source(), field(), filter, fieldExtractPreference); + } + + @Override + public org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent withDocValues() { + return new SpatialExtent(source(), field(), filter(), FieldExtractPreference.DOC_VALUES); + } + + @Override + protected TypeResolution resolveType() { + return isSpatial(field(), sourceText(), DEFAULT); + } + + @Override + public DataType dataType() { + return DataType.isSpatialGeo(field().dataType()) ? 
DataType.GEO_SHAPE : DataType.CARTESIAN_SHAPE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialExtent::new, field()); + } + + @Override + public SpatialExtent replaceChildren(List newChildren) { + return new SpatialExtent(source(), newChildren.get(0)); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + return switch (field().dataType()) { + case DataType.GEO_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialExtentGeoPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialExtentGeoPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + case DataType.CARTESIAN_POINT -> switch (fieldExtractPreference) { + case DOC_VALUES -> new SpatialExtentCartesianPointDocValuesAggregatorFunctionSupplier(inputChannels); + case NONE -> new SpatialExtentCartesianPointSourceValuesAggregatorFunctionSupplier(inputChannels); + }; + // Shapes don't differentiate between source and doc values. 
+ case DataType.GEO_SHAPE -> new SpatialExtentGeoShapeAggregatorFunctionSupplier(inputChannels); + case DataType.CARTESIAN_SHAPE -> new SpatialExtentCartesianShapeAggregatorFunctionSupplier(inputChannels); + default -> throw EsqlIllegalArgumentException.illegalDataType(field().dataType()); + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java index 934991f3a8088..ca243efcc2851 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelope.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -129,7 +130,7 @@ static BytesRef fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point) { return wkb; } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return UNSPECIFIED.asWkb(envelope.get()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java index d6d710b175113..69eede1c5fac5 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMaxX(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java index a5fa11bc11b0f..b29a547ab0af6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMin.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static 
double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMinX(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java index fbbea8e024a6b..981b500bcaef7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMax.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMaxY(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java index 1707d3b4f2fb9..882aeb30afaee 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMin.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -114,7 +115,7 @@ static double fromWellKnownBinaryGeo(BytesRef wkb) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, true); + var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP); if (envelope.isPresent()) { return envelope.get().getMinY(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 1f55e293b8e75..1918e3036e2b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; @@ -66,7 +67,7 @@ final class AggregateMapper { private static final List NUMERIC = List.of("Int", "Long", "Double"); - private static final List SPATIAL = List.of("GeoPoint", "CartesianPoint"); + private static final List SPATIAL_EXTRA_CONFIGS = List.of("SourceValues", "DocValues"); /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ private static final List> AGG_FUNCTIONS = List.of( @@ -77,6 +78,7 @@ final class AggregateMapper { Min.class, Percentile.class, SpatialCentroid.class, + SpatialExtent.class, StdDev.class, Sum.class, Values.class, @@ -89,7 +91,11 @@ final class AggregateMapper { ); /** Record of agg Class, type, and grouping (or non-grouping). */ - private record AggDef(Class aggClazz, String type, String extra, boolean grouping) {} + private record AggDef(Class aggClazz, String type, String extra, boolean grouping) { + public AggDef withoutExtra() { + return new AggDef(aggClazz, type, "", grouping); + } + } /** Map of AggDef types to intermediate named expressions. */ private static final Map> MAPPER = AGG_FUNCTIONS.stream() @@ -145,7 +151,7 @@ private static List entryForAgg(String aggAlias, AggregateFunct var aggDef = new AggDef( aggregateFunction.getClass(), dataTypeToString(aggregateFunction.field().dataType(), aggregateFunction.getClass()), - aggregateFunction instanceof SpatialCentroid ? "SourceValues" : "", + aggregateFunction instanceof SpatialAggregateFunction ? 
"SourceValues" : "", grouping ); var is = getNonNull(aggDef); @@ -154,7 +160,7 @@ private static List entryForAgg(String aggAlias, AggregateFunct /** Gets the agg from the mapper - wrapper around map::get for more informative failure.*/ private static List getNonNull(AggDef aggDef) { - var l = MAPPER.get(aggDef); + var l = MAPPER.getOrDefault(aggDef, MAPPER.get(aggDef.withoutExtra())); if (l == null) { throw new EsqlIllegalArgumentException("Cannot find intermediate state for: " + aggDef); } @@ -170,9 +176,14 @@ private static Stream, Tuple>> typeAndNames(Class types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); } else if (clazz == Count.class) { types = List.of(""); // no extra type distinction - } else if (SpatialAggregateFunction.class.isAssignableFrom(clazz)) { - types = SPATIAL; - extraConfigs = List.of("SourceValues", "DocValues"); + } else if (clazz == SpatialCentroid.class) { + types = List.of("GeoPoint", "CartesianPoint"); + extraConfigs = SPATIAL_EXTRA_CONFIGS; + } else if (clazz == SpatialExtent.class) { + return Stream.concat( + combine(clazz, List.of("GeoPoint", "CartesianPoint"), SPATIAL_EXTRA_CONFIGS), + combine(clazz, List.of("GeoShape", "CartesianShape"), List.of("")) + ); } else if (Values.class.isAssignableFrom(clazz)) { // TODO can't we figure this out from the function itself? types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); @@ -188,6 +199,10 @@ private static Stream, Tuple>> typeAndNames(Class assert false : "unknown aggregate type " + clazz; throw new IllegalArgumentException("unknown aggregate type " + clazz); } + return combine(clazz, types, extraConfigs); + } + + private static Stream, Tuple>> combine(Class clazz, List types, List extraConfigs) { return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); } @@ -219,6 +234,15 @@ private static List lookupIntermediateState(AggDef aggDef /** Looks up the intermediate state method for a given class, type, and grouping. 
*/ private static MethodHandle lookup(Class clazz, String type, String extra, boolean grouping) { + try { + return lookupRetry(clazz, type, extra, grouping); + } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { + throw new EsqlIllegalArgumentException(e); + } + } + + private static MethodHandle lookupRetry(Class clazz, String type, String extra, boolean grouping) throws IllegalAccessException, + NoSuchMethodException, ClassNotFoundException { try { return MethodHandles.lookup() .findStatic( @@ -226,8 +250,14 @@ private static MethodHandle lookup(Class clazz, String type, String extra, bo "intermediateStateDesc", MethodType.methodType(List.class) ); - } catch (IllegalAccessException | NoSuchMethodException | ClassNotFoundException e) { - throw new EsqlIllegalArgumentException(e); + } catch (NoSuchMethodException ignore) { + // Retry without the extra information. + return MethodHandles.lookup() + .findStatic( + Class.forName(determineAggName(clazz, type, "", grouping)), + "intermediateStateDesc", + MethodType.methodType(List.class) + ); } } @@ -301,8 +331,10 @@ private static String dataTypeToString(DataType type, Class aggClass) { case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef"; case GEO_POINT -> "GeoPoint"; case CARTESIAN_POINT -> "CartesianPoint"; + case GEO_SHAPE -> "GeoShape"; + case CARTESIAN_SHAPE -> "CartesianShape"; case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, - CARTESIAN_SHAPE, GEO_SHAPE, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( + DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( "illegal agg type: " + type.typeName() ); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java new file mode 100644 index 0000000000000..48fbc9c8e0378 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/RectangleMatcher.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.compute.aggregation.spatial.PointType; +import org.elasticsearch.geometry.Rectangle; +import org.hamcrest.Description; +import org.hamcrest.Matchers; +import org.hamcrest.TypeSafeMatcher; + +/** + * Example usage: assertThat(actualRectangle, RectangleMatcher.closeTo(expectedRectangle, 0.0001, PointType.CARTESIAN));, or it + * can be used as a parameter to {@link WellKnownBinaryBytesRefMatcher}. + */ +public class RectangleMatcher extends TypeSafeMatcher { + private final Rectangle r; + private final PointType pointType; + private final double error; + + public static TypeSafeMatcher closeTo(Rectangle r, double error, PointType pointType) { + return new RectangleMatcher(r, error, pointType); + } + + private RectangleMatcher(Rectangle r, double error, PointType pointType) { + this.r = r; + this.pointType = pointType; + this.error = error; + } + + @Override + protected boolean matchesSafely(Rectangle other) { + // For geo bounds, longitude of (-180, 180) and (epsilon, -epsilon) are actually very close, since both encompass the entire globe. 
+ boolean wrapAroundWorkAround = pointType == PointType.GEO && r.getMinX() >= r.getMaxX(); + boolean matchMinX = Matchers.closeTo(r.getMinX(), error).matches(other.getMinX()) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMinX() - 180, error).matches(other.getMinX())) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMinX(), error).matches(other.getMinX() - 180)); + boolean matchMaxX = Matchers.closeTo(r.getMaxX(), error).matches(other.getMaxX()) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMaxX() + 180, error).matches(other.getMaxX())) + || (wrapAroundWorkAround && Matchers.closeTo(r.getMaxX(), error).matches(other.getMaxX() + 180)); + + return matchMinX + && matchMaxX + && Matchers.closeTo(r.getMaxY(), error).matches(other.getMaxY()) + && Matchers.closeTo(r.getMinY(), error).matches(other.getMinY()); + } + + @Override + public void describeMismatchSafely(Rectangle rectangle, Description description) { + description.appendText("was ").appendValue(rectangle); + } + + @Override + public void describeTo(Description description) { + description.appendValue(" " + r); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java new file mode 100644 index 0000000000000..535bb820458cd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/WellKnownBinaryBytesRefMatcher.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** A wrapper for matching geometries encoded as WKB in a BytesRef. */ +public class WellKnownBinaryBytesRefMatcher extends TypeSafeMatcher { + private final Matcher matcher; + + public WellKnownBinaryBytesRefMatcher(Matcher matcher) { + this.matcher = matcher; + } + + @Override + public boolean matchesSafely(BytesRef bytesRef) { + return matcher.matches(fromBytesRef(bytesRef)); + } + + @Override + public void describeMismatchSafely(BytesRef bytesRef, Description description) { + matcher.describeMismatch(fromBytesRef(bytesRef), description); + } + + @SuppressWarnings("unchecked") + private G fromBytesRef(BytesRef bytesRef) { + return (G) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false /* coerce */, bytesRef.bytes, bytesRef.offset, bytesRef.length); + } + + @Override + public void describeTo(Description description) { + matcher.describeTo(description); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index df1675ba22568..c086245d6fd61 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -277,9 +277,11 @@ private void evaluate(Expression evaluableExpression) { } private void resolveExpression(Expression expression, Consumer onAggregator, Consumer onEvaluableExpression) { - logger.info( - "Test Values: " + 
testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) - ); + String valuesString = testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")); + if (valuesString.length() > 200) { + valuesString = valuesString.substring(0, 200) + "..."; + } + logger.info("Test Values: " + valuesString); if (testCase.getExpectedTypeError() != null) { assertTypeResolutionFailure(expression); return; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java index 775ca45bfa124..bb0d2e57c3440 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java @@ -9,9 +9,11 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.versionfield.Version; @@ -19,11 +21,11 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.List; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomList; -import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.TypedDataSupplier; @@ -263,9 +265,7 @@ public static List dateCases(int minRows, int maxRows) { } /** - * * Generate cases for {@link DataType#DATE_NANOS}. - * */ public static List dateNanosCases(int minRows, int maxRows) { List cases = new ArrayList<>(); @@ -370,53 +370,58 @@ public static List versionCases(int minRows, int maxRows) { return cases; } - public static List geoPointCases(int minRows, int maxRows, boolean withAltitude) { - List cases = new ArrayList<>(); + public enum IncludingAltitude { + YES, + NO + } - addSuppliers( - cases, + public static List geoPointCases(int minRows, int maxRows, IncludingAltitude withAltitude) { + return spatialCases(minRows, maxRows, withAltitude, "geo_point", DataType.GEO_POINT, GeometryTestUtils::randomPoint); + } + + public static List geoShapeCasesWithoutCircle(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases( minRows, maxRows, - "", - DataType.GEO_POINT, - () -> GEO.asWkb(GeometryTestUtils.randomPoint(false)) + includingAltitude, + "geo_shape", + DataType.GEO_SHAPE, + b -> GeometryTestUtils.randomGeometryWithoutCircle(0, b) ); - - if (withAltitude) { - addSuppliers( - cases, - minRows, - maxRows, - "", - DataType.GEO_POINT, - () -> GEO.asWkb(GeometryTestUtils.randomPoint(true)) - ); - } - - return cases; } - public static List cartesianPointCases(int minRows, int maxRows, boolean withAltitude) { - List cases = new ArrayList<>(); - - addSuppliers( - cases, + public static List cartesianShapeCasesWithoutCircle(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases( minRows, maxRows, - "", - DataType.CARTESIAN_POINT, - () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(false)) + includingAltitude, + "geo_shape", + DataType.CARTESIAN_SHAPE, + b -> ShapeTestUtils.randomGeometryWithoutCircle(0, b) ); + } - if (withAltitude) { - addSuppliers( - cases, - minRows, - maxRows, - "", - 
DataType.CARTESIAN_POINT, - () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(true)) - ); + public static List cartesianPointCases(int minRows, int maxRows, IncludingAltitude includingAltitude) { + return spatialCases(minRows, maxRows, includingAltitude, "cartesian_point", DataType.CARTESIAN_POINT, ShapeTestUtils::randomPoint); + } + + @SuppressWarnings("fallthrough") + private static List spatialCases( + int minRows, + int maxRows, + IncludingAltitude includingAltitude, + String name, + DataType type, + Function gen + ) { + List cases = new ArrayList<>(); + + switch (includingAltitude) { + case YES: + addSuppliers(cases, minRows, maxRows, Strings.format("", name), type, () -> GEO.asWkb(gen.apply(true))); + // Explicit fallthrough: always generate a case without altitude. + case NO: + addSuppliers(cases, minRows, maxRows, Strings.format("", name), type, () -> GEO.asWkb(gen.apply(false))); } return cases; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java index 131072acff870..0485714959f63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -44,8 +45,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.booleanCases(1, 1000), 
MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), - MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), - MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true), + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.YES), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.YES), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java index 15ea029a05554..b92b32aa7ad09 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.BaseMatcher; import org.hamcrest.Description; @@ -41,8 +42,8 @@ public SpatialCentroidTests(@Name("TestCase") Supplier parameters() { var suppliers = Stream.of( - MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), - MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true) + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.NO) 
).flatMap(List::stream).map(SpatialCentroidTests::makeSupplier).toList(); // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java new file mode 100644 index 0000000000000..a1faa537ba052 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialExtentTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.spatial.PointType; +import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.RectangleMatcher; +import org.elasticsearch.xpack.esql.expression.WellKnownBinaryBytesRefMatcher; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import 
org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier.IncludingAltitude; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Stream; + +@FunctionName("st_extent_agg") +public class SpatialExtentTests extends AbstractAggregationTestCase { + public SpatialExtentTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = Stream.of( + MultiRowTestCaseSupplier.geoPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.geoShapeCasesWithoutCircle(1, 1000, IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianShapeCasesWithoutCircle(1, 1000, IncludingAltitude.NO) + ).flatMap(List::stream).map(SpatialExtentTests::makeSupplier).toList(); + + // The withNoRowsExpectingNull() cases don't work here, as this aggregator doesn't return nulls. 
+ // return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialExtent(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + PointType pointType = switch (fieldSupplier.type()) { + case DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE -> PointType.CARTESIAN; + case DataType.GEO_POINT, DataType.GEO_SHAPE -> PointType.GEO; + default -> throw new IllegalArgumentException("Unsupported type: " + fieldSupplier.type()); + }; + var pointVisitor = switch (pointType) { + case CARTESIAN -> new SpatialEnvelopeVisitor.CartesianPointVisitor(); + case GEO -> new SpatialEnvelopeVisitor.GeoPointVisitor(WrapLongitude.WRAP); + }; + + var fieldTypedData = fieldSupplier.get(); + DataType expectedType = DataType.isSpatialGeo(fieldTypedData.type()) ? DataType.GEO_SHAPE : DataType.CARTESIAN_SHAPE; + fieldTypedData.multiRowData() + .stream() + .map(value -> (BytesRef) value) + .map(value -> WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, value.bytes, value.offset, value.length)) + .forEach(g -> g.visit(new SpatialEnvelopeVisitor(pointVisitor))); + assert pointVisitor.isValid(); + Rectangle result = pointVisitor.getResult(); + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "SpatialExtent[field=Attribute[channel=0]]", + expectedType, + new WellKnownBinaryBytesRefMatcher<>( + RectangleMatcher.closeTo( + new Rectangle( + // Since we use integers locally which are later decoded to doubles, all computation is effectively done using + // floats, not doubles. 
+ (float) result.getMinX(), + (float) result.getMaxX(), + (float) result.getMaxY(), + (float) result.getMinY() + ), + 1e-3, + pointType + ) + ) + ); + }); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java index ac87d45491447..9f629d9127673 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StEnvelopeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -74,7 +75,9 @@ private static BytesRef valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point) { return wkb; } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return UNSPECIFIED.asWkb(envelope.get()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java index dc6e61e44f599..9205879fa1cb9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMaxTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMaxX(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java index 8c06d18b1e281..3603bff9656fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMinTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getX(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMinX(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java index 7222d7517f7ff..cb2a03c3a9473 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMaxTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMaxY(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java index 843c7bb649114..0c191f6dc4c5b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor; +import org.elasticsearch.geometry.utils.SpatialEnvelopeVisitor.WrapLongitude; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -61,7 +62,9 @@ private static double valueOf(BytesRef wkb, boolean geo) { if (geometry instanceof Point point) { return point.getY(); } - var envelope = geo ? SpatialEnvelopeVisitor.visitGeo(geometry, true) : SpatialEnvelopeVisitor.visitCartesian(geometry); + var envelope = geo + ? 
SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP) + : SpatialEnvelopeVisitor.visitCartesian(geometry); if (envelope.isPresent()) { return envelope.get().getMinY(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index ec1d55a0fc58f..dc3ae0a3388cb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -64,6 +65,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialExtent; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; @@ -253,7 +255,7 @@ public void init() { "mapping-airports_no_doc_values.json", functionRegistry, enrichResolution, - new TestConfigurableSearchStats().exclude(Config.DOC_VALUES, "location") + new TestConfigurableSearchStats().exclude(Config.DOC_VALUES, "location").exclude(Config.DOC_VALUES, "city_location") ); this.airportsNotIndexed = makeTestDataSource( 
"airports-not-indexed", @@ -2804,7 +2806,7 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { * Also note that the type converting function is removed when it does not actually convert the type, * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. */ - public void testSpatialTypesAndStatsUseDocValues() { + public void testSpatialTypesAndStatsCentroidUseDocValues() { for (String query : new String[] { "from airports | stats centroid = st_centroid_agg(location)", "from airports | stats centroid = st_centroid_agg(to_geopoint(location))", @@ -2838,6 +2840,129 @@ public void testSpatialTypesAndStatsUseDocValues() { } } + /** + * Before local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],null] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * Aggregate[STANDARD,[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent]] + * \_EsRelation[airports][abbrev{f}#44, city{f}#50, city_location{f}#51, coun..]]] + * + * After local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],21] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],INITIAL,[ + * minNegX{r}#73, minPosX{r}#74, maxNegX{rb#75, maxPosX{r}#76, maxY{r}#77, minY{r}#78],21] + * \_FieldExtractExec[location{f}#48][location{f}#48] + * \_EsQueryExec[airports], indexMode[standard], query[{"exists":{"field":"location","boost":1.0}}][ + * 
_doc{f}#79], limit[], sort[] estimatedRowSize[25] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + *

    + * Also note that the type converting function is removed when it does not actually convert the type, + * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. + */ + public void testSpatialTypesAndStatsExtentUseDocValues() { + for (String query : new String[] { + "from airports | stats extent = st_extent_agg(location)", + "from airports | stats extent = st_extent_agg(to_geopoint(location))", + "from airports | eval location = to_geopoint(location) | stats extent = st_extent_agg(location)" }) { + for (boolean withDocValues : new boolean[] { false, true }) { + var testData = withDocValues ? airports : airportsNoDocValues; + var plan = physicalPlan(query, testData); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan, testData.stats); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, withDocValues); + assertChildIsGeoPointExtract(withDocValues ? 
agg : as(agg.child(), FilterExec.class), withDocValues); + } + } + } + + /** + * Before local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],null] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * Aggregate[STANDARD,[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent]] + * \_EsRelation[airports][abbrev{f}#44, city{f}#50, city_location{f}#51, coun..]]] + * + * After local optimizations: + * + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],FINAL,[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, + * maxPosX{r}#55, maxY{r}#56, minY{r}#57],21] + * \_ExchangeExec[[minNegX{r}#52, minPosX{r}#53, maxNegX{r}#54, maxPosX{r}#55, maxY{r}#56, minY{r}#57],true] + * \_AggregateExec[[],[SPATIALSTEXTENT(location{f}#48,true[BOOLEAN]) AS extent],INITIAL,[ + * minNegX{r}#73, minPosX{r}#74, maxNegX{rb#75, maxPosX{r}#76, maxY{r}#77, minY{r}#78],21] + * \_FieldExtractExec[location{f}#48][location{f}#48] + * \_EsQueryExec[airports], indexMode[standard], query[{"exists":{"field":"location","boost":1.0}}][ + * _doc{f}#79], limit[], sort[] estimatedRowSize[25] + * + * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] + *

    + * Also note that the type converting function is removed when it does not actually convert the type, + * ensuring that ReferenceAttributes are not created for the same field, and the optimization can still work. + */ + public void testSpatialTypesAndStatsExtentAndCentroidUseDocValues() { + for (String query : new String[] { + "from airports | stats extent = st_extent_agg(location), centroid = st_centroid_agg(location)", + "from airports | stats extent = st_extent_agg(location), centroid = st_centroid_agg(city_location)", }) { + for (boolean withDocValues : new boolean[] { false, true }) { + var testData = withDocValues ? airports : airportsNoDocValues; + var plan = physicalPlan(query, testData); + + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + as(fAgg.child(), EsRelation.class); + + // Now optimize the plan and assert the aggregation uses doc-values + var optimized = optimizedPlan(plan, testData.stats); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "extent", SpatialExtent.class, GEO_POINT, withDocValues); + assertChildIsGeoPointExtract(withDocValues ? 
agg : as(agg.child(), FilterExec.class), withDocValues); + } + } + } + /** * This test does not have real index fields, and therefor asserts that doc-values field extraction does NOT occur. * Before local optimizations: @@ -6805,7 +6930,11 @@ private static void assertAggregation( var aggFunc = assertAggregation(plan, aliasName, aggClass); var aggField = as(aggFunc.field(), Attribute.class); var spatialAgg = as(aggFunc, SpatialAggregateFunction.class); - assertThat("Expected spatial aggregation to use doc-values", spatialAgg.useDocValues(), equalTo(useDocValues)); + assertThat( + "Expected spatial aggregation to use doc-values", + spatialAgg.fieldExtractPreference(), + equalTo(useDocValues ? FieldExtractPreference.DOC_VALUES : FieldExtractPreference.NONE) + ); assertThat("", aggField.dataType(), equalTo(fieldType)); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 81f65668722fc..2a4cde9a680e9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasbile, so we just check that we return the correct count as an approximation. 
- - length: {esql.functions: 128} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 129} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 124} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 125} # check the "sister" test above for a likely update to the same esql.functions length check From 48c892ceece16fd0f20bcda4659474d338b47842 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 13 Dec 2024 22:25:36 +1100 Subject: [PATCH 12/32] Mute org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests testPlanSanityCheckWithBinaryPlans #118656 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 95beeb7aa8f8d..d5b933b96d73b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -317,6 +317,9 @@ tests: - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testInvalidJSON issue: https://github.com/elastic/elasticsearch/issues/116521 +- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests + method: testPlanSanityCheckWithBinaryPlans + issue: https://github.com/elastic/elasticsearch/issues/118656 # Examples: # From a765f89c212c436ff36f554020365ea913728ac8 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Fri, 13 Dec 2024 12:59:14 +0100 Subject: [PATCH 13/32] Make search functions translation aware (#118355) * Introduce TranslationAware interface * Serialize query builder * Fix EsqlNodeSubclassTests * Add javadoc * Address review comments * Revert 
changes on making constructors private --- .../org/elasticsearch/TransportVersions.java | 1 + .../core/expression/TranslationAware.java | 20 ++++++++ .../TranslationAwareExpressionQuery.java | 35 ++++++++++++++ .../function/EsqlFunctionRegistry.java | 6 +-- .../function/fulltext/FullTextFunction.java | 46 ++++++++++++++++++- .../expression/function/fulltext/Kql.java | 38 ++++++++++++--- .../expression/function/fulltext/Match.java | 33 +++++++++++-- .../function/fulltext/QueryString.java | 42 ++++++++++++++--- .../expression/function/fulltext/Term.java | 33 +++++++++++-- .../planner/EsqlExpressionTranslators.java | 5 ++ 10 files changed, 234 insertions(+), 25 deletions(-) create mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java create mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4135b1f0b8e9a..388123e86c882 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -138,6 +138,7 @@ static TransportVersion def(int id) { public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0); + public static final TransportVersion ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS = def(8_808_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java new file mode 100644 index 0000000000000..b1ac2b36314fa --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; + +/** + * Expressions can implement this interface to control how they would be translated and pushed down as Lucene queries. + * When an expression implements {@link TranslationAware}, we call {@link #asQuery(TranslatorHandler)} to get the + * {@link Query} translation, instead of relying on the registered translators from EsqlExpressionTranslators. + */ +public interface TranslationAware { + Query asQuery(TranslatorHandler translatorHandler); +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java new file mode 100644 index 0000000000000..92a42d3053b68 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.querydsl.query; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * Expressions that store their own {@link QueryBuilder} and implement + * {@link org.elasticsearch.xpack.esql.core.expression.TranslationAware} can use {@link TranslationAwareExpressionQuery} + * to wrap their {@link QueryBuilder}, instead of using the other existing {@link Query} implementations. + */ +public class TranslationAwareExpressionQuery extends Query { + private final QueryBuilder queryBuilder; + + public TranslationAwareExpressionQuery(Source source, QueryBuilder queryBuilder) { + super(source); + this.queryBuilder = queryBuilder; + } + + @Override + public QueryBuilder asBuilder() { + return queryBuilder; + } + + @Override + protected String innerToString() { + return queryBuilder.toString(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 50d0d2438d8a1..1ccc22eb3a6a4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -416,7 +416,7 @@ private static FunctionDefinition[][] functions() { def(MvSum.class, MvSum::new, "mv_sum"), def(Split.class, Split::new, "split") }, // fulltext functions - new FunctionDefinition[] { def(Match.class, Match::new, "match"), def(QueryString.class, QueryString::new, "qstr") } }; + new FunctionDefinition[] { def(Match.class, bi(Match::new), "match"), def(QueryString.class, uni(QueryString::new), "qstr") } }; } @@ -426,9 +426,9 @@ private static 
FunctionDefinition[][] snapshotFunctions() { // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. // This is an experimental function and can be removed without notice. def(Delay.class, Delay::new, "delay"), - def(Kql.class, Kql::new, "kql"), + def(Kql.class, uni(Kql::new), "kql"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), - def(Term.class, Term::new, "term") } }; + def(Term.class, bi(Term::new), "term") } }; } public EsqlFunctionRegistry snapshotRegistry() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 78dc05af8f342..432d2d5f07429 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,14 +8,21 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.TranslationAwareExpressionQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import 
java.util.List; +import java.util.Objects; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; @@ -26,13 +33,15 @@ * These functions needs to be pushed down to Lucene queries to be executed - there's no Evaluator for them, but depend on * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries. */ -public abstract class FullTextFunction extends Function { +public abstract class FullTextFunction extends Function implements TranslationAware { private final Expression query; + private final QueryBuilder queryBuilder; - protected FullTextFunction(Source source, Expression query, List children) { + protected FullTextFunction(Source source, Expression query, List children, QueryBuilder queryBuilder) { super(source, children); this.query = query; + this.queryBuilder = queryBuilder; } @Override @@ -116,4 +125,37 @@ public Nullability nullable() { public String functionType() { return "function"; } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), queryBuilder); + } + + @Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + + return Objects.equals(queryBuilder, ((FullTextFunction) obj).queryBuilder); + } + + @Override + public Query asQuery(TranslatorHandler translatorHandler) { + if (queryBuilder != null) { + return new TranslationAwareExpressionQuery(source(), queryBuilder); + } + + ExpressionTranslator translator = translator(); + return translator.translate(this, translatorHandler); + } + + public QueryBuilder queryBuilder() { + return queryBuilder; + } + + @SuppressWarnings("rawtypes") + protected abstract ExpressionTranslator translator(); + + public abstract Expression replaceQueryBuilder(QueryBuilder queryBuilder); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java index c03902373c02e..1f7bcadd259a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; import java.io.IOException; @@ -26,7 +30,7 @@ * Full text function that performs a {@link KqlQuery} . */ public class Kql extends FullTextFunction { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Kql", Kql::new); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Kql", Kql::readFrom); @FunctionInfo( returnType = "boolean", @@ -42,17 +46,30 @@ public Kql( description = "Query string in KQL query string format." 
) Expression queryString ) { - super(source, queryString, List.of(queryString)); + super(source, queryString, List.of(queryString), null); } - private Kql(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + public Kql(Source source, Expression queryString, QueryBuilder queryBuilder) { + super(source, queryString, List.of(queryString), queryBuilder); + } + + private static Kql readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression query = in.readNamedWriteable(Expression.class); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Kql(source, query, queryBuilder); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -62,12 +79,21 @@ public String getWriteableName() { @Override public Expression replaceChildren(List newChildren) { - return new Kql(source(), newChildren.get(0)); + return new Kql(source(), newChildren.get(0), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Kql::new, query()); + return NodeInfo.create(this, Kql::new, query(), queryBuilder()); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.KqlFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Kql(source(), query(), queryBuilder); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 2b9a7c73a5853..0b2268fe1b022 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -8,15 +8,18 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -27,6 +30,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; @@ -109,7 +113,11 @@ public Match( description = "Value to find in the provided field." 
) Expression matchQuery ) { - super(source, matchQuery, List.of(field, matchQuery)); + this(source, field, matchQuery, null); + } + + public Match(Source source, Expression field, Expression matchQuery, QueryBuilder queryBuilder) { + super(source, matchQuery, List.of(field, matchQuery), queryBuilder); this.field = field; } @@ -117,7 +125,11 @@ private static Match readFrom(StreamInput in) throws IOException { Source source = Source.readFrom((PlanStreamInput) in); Expression field = in.readNamedWriteable(Expression.class); Expression query = in.readNamedWriteable(Expression.class); - return new Match(source, field, query); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Match(source, field, query, queryBuilder); } @Override @@ -125,6 +137,9 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -224,12 +239,12 @@ public Object queryAsObject() { @Override public Expression replaceChildren(List newChildren) { - return new Match(source(), newChildren.get(0), newChildren.get(1)); + return new Match(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Match::new, field, query()); + return NodeInfo.create(this, Match::new, field, query(), queryBuilder()); } protected TypeResolutions.ParamOrdinal queryParamOrdinal() { @@ -245,6 +260,16 @@ public String functionType() { return isOperator() ? 
"operator" : super.functionType(); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.MatchFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Match(source(), field, query(), queryBuilder); + } + @Override public String functionName() { return isOperator() ? ":" : super.functionName(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java index bd79661534b76..ea21411d09173 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -18,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import java.io.IOException; import java.util.List; @@ -27,7 +31,11 @@ */ public class QueryString extends FullTextFunction { - 
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "QStr", QueryString::new); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "QStr", + QueryString::readFrom + ); @FunctionInfo( returnType = "boolean", @@ -44,17 +52,30 @@ public QueryString( description = "Query string in Lucene query string format." ) Expression queryString ) { - super(source, queryString, List.of(queryString)); + super(source, queryString, List.of(queryString), null); } - private QueryString(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + public QueryString(Source source, Expression queryString, QueryBuilder queryBuilder) { + super(source, queryString, List.of(queryString), queryBuilder); + } + + private static QueryString readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression query = in.readNamedWriteable(Expression.class); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new QueryString(source, query, queryBuilder); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -69,12 +90,21 @@ public String functionName() { @Override public Expression replaceChildren(List newChildren) { - return new QueryString(source(), newChildren.get(0)); + return new QueryString(source(), newChildren.get(0), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, QueryString::new, query()); + return 
NodeInfo.create(this, QueryString::new, query(), queryBuilder()); } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.QueryStringFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new QueryString(source(), query(), queryBuilder); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index 125a5b02b6e1c..ff8085cd1b44b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -7,15 +7,18 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -23,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; import java.io.IOException; import java.util.List; @@ -56,7 +60,11 @@ public Term( description = "Term you wish to find in the provided field." ) Expression termQuery ) { - super(source, termQuery, List.of(field, termQuery)); + this(source, field, termQuery, null); + } + + public Term(Source source, Expression field, Expression termQuery, QueryBuilder queryBuilder) { + super(source, termQuery, List.of(field, termQuery), queryBuilder); this.field = field; } @@ -64,7 +72,11 @@ private static Term readFrom(StreamInput in) throws IOException { Source source = Source.readFrom((PlanStreamInput) in); Expression field = in.readNamedWriteable(Expression.class); Expression query = in.readNamedWriteable(Expression.class); - return new Term(source, field, query); + QueryBuilder queryBuilder = null; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); + } + return new Term(source, field, query, queryBuilder); } @Override @@ -72,6 +84,9 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeNamedWriteable(field()); out.writeNamedWriteable(query()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_QUERY_BUILDER_IN_SEARCH_FUNCTIONS)) { + out.writeOptionalNamedWriteable(queryBuilder()); + } } @Override @@ -101,18 +116,28 @@ public void validate(Failures failures) { @Override public Expression replaceChildren(List newChildren) { - return new Term(source(), newChildren.get(0), newChildren.get(1)); + return new Term(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Term::new, field, query()); + return NodeInfo.create(this, Term::new, field, query(), queryBuilder()); } protected 
TypeResolutions.ParamOrdinal queryParamOrdinal() { return SECOND; } + @Override + protected ExpressionTranslator translator() { + return new EsqlExpressionTranslators.TermFunctionTranslator(); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Term(source(), field, query(), queryBuilder); + } + public Expression field() { return field; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 7820f0f657f7f..43bbf9a5f4ff1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; @@ -100,7 +101,11 @@ public final class EsqlExpressionTranslators { ); public static Query toQuery(Expression e, TranslatorHandler handler) { + if (e instanceof TranslationAware ta) { + return ta.asQuery(handler); + } Query translation = null; + for (ExpressionTranslator translator : QUERY_TRANSLATORS) { translation = translator.translate(e, handler); if (translation != null) { From ccdea4a21cd1fe195e5005d81f0a3a49e1f91c67 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 13 Dec 2024 13:41:46 +0100 Subject: [PATCH 14/32] ESQL: tests for LOOKUP JOIN with non-unique join keys (#118471) Add a csv dataset and tests for 
`LOOKUP JOIN` where the join keys are not unique. In particular, add tests that include MVs and nulls to see how `LOOKUP JOIN` treats these. --- .../xpack/esql/CsvTestsDataLoader.java | 3 + .../resources/languages_non_unique_key.csv | 10 +++ .../src/main/resources/lookup-join.csv-spec | 78 ++++++++++++++++++- 3 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index dbeb54996733a..3b656ded94dd7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -63,6 +63,8 @@ public class CsvTestsDataLoader { private static final TestsDataset LANGUAGES = new TestsDataset("languages"); private static final TestsDataset LANGUAGES_LOOKUP = LANGUAGES.withIndex("languages_lookup") .withSetting("languages_lookup-settings.json"); + private static final TestsDataset LANGUAGES_LOOKUP_NON_UNIQUE_KEY = LANGUAGES_LOOKUP.withIndex("languages_lookup_non_unique_key") + .withData("languages_non_unique_key.csv"); private static final TestsDataset ALERTS = new TestsDataset("alerts"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs"); private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data"); @@ -114,6 +116,7 @@ public class CsvTestsDataLoader { Map.entry(APPS_SHORT.indexName, APPS_SHORT), Map.entry(LANGUAGES.indexName, LANGUAGES), Map.entry(LANGUAGES_LOOKUP.indexName, LANGUAGES_LOOKUP), + Map.entry(LANGUAGES_LOOKUP_NON_UNIQUE_KEY.indexName, LANGUAGES_LOOKUP_NON_UNIQUE_KEY), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), 
Map.entry(MV_SAMPLE_DATA.indexName, MV_SAMPLE_DATA), diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv new file mode 100644 index 0000000000000..1578762f8d1cb --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_non_unique_key.csv @@ -0,0 +1,10 @@ +language_code:integer,language_name:keyword,country:keyword +1,English,Canada +1,English, +1,,United Kingdom +1,English,United States of America +2,German,[Germany,Austria] +2,German,Switzerland +2,German, +4,Quenya, +5,,Atlantis diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 12e333c0ed9f2..f6704d33934af 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -3,7 +3,6 @@ // Reuses the sample dataset and commands from enrich.csv-spec // -//TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) basicOnTheDataNode required_capability: join_lookup_v5 @@ -102,6 +101,83 @@ emp_no:integer | language_code:integer | language_name:keyword 10003 | 4 | German ; +nonUniqueLeftKeyOnTheDataNode +required_capability: join_lookup_v5 + +FROM employees +| WHERE emp_no <= 10030 +| EVAL language_code = emp_no % 10 +| WHERE language_code < 3 +| LOOKUP JOIN languages_lookup ON language_code +| SORT emp_no +| KEEP emp_no, language_code, language_name +; + +emp_no:integer | language_code:integer | language_name:keyword +10001 |1 | English +10002 |2 | French +10010 |0 | null +10011 |1 | English +10012 |2 | French +10020 |0 | null +10021 |1 | English +10022 |2 | French +10030 |0 | null +; + +nonUniqueRightKeyOnTheDataNode +required_capability: join_lookup_v5 + +FROM employees +| EVAL language_code = 
emp_no % 10 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| WHERE emp_no > 10090 AND emp_no < 10096 +| SORT emp_no +| EVAL country = MV_SORT(country) +| KEEP emp_no, language_code, language_name, country +; + +emp_no:integer | language_code:integer | language_name:keyword | country:keyword +10091 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] +10092 | 2 | [German, German, German] | [Austria, Germany, Switzerland] +10093 | 3 | null | null +10094 | 4 | Quenya | null +10095 | 5 | null | Atlantis +; + +nonUniqueRightKeyOnTheCoordinator +required_capability: join_lookup_v5 + +FROM employees +| SORT emp_no +| LIMIT 5 +| EVAL language_code = emp_no % 10 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| EVAL country = MV_SORT(country) +| KEEP emp_no, language_code, language_name, country +; + +emp_no:integer | language_code:integer | language_name:keyword | country:keyword +10001 | 1 | [English, English, English] | [Canada, United Kingdom, United States of America] +10002 | 2 | [German, German, German] | [Austria, Germany, Switzerland] +10003 | 3 | null | null +10004 | 4 | Quenya | null +10005 | 5 | null | Atlantis +; + +nonUniqueRightKeyFromRow +required_capability: join_lookup_v5 + +ROW language_code = 2 +| LOOKUP JOIN languages_lookup_non_unique_key ON language_code +| DROP country.keyword +| EVAL country = MV_SORT(country) +; + +language_code:integer | language_name:keyword | country:keyword +2 | [German, German, German] | [Austria, Germany, Switzerland] +; + lookupIPFromRow required_capability: join_lookup_v5 From 71f98221cd8e0813e24d04f68cb03f1cc7e3f639 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 13 Dec 2024 14:09:06 +0100 Subject: [PATCH 15/32] Fix bug in InferenceUpgradeTestCase.getConfigsWithBreakingChangeHandling (#118624) We need to load the two fields from the same response. 
Otherwise, we can have a sort of race where we load "endpoints" from pre-8.15 as empty and then load "models" from a post-8.15 node also empty, resulting in an empty list because we took the wrong info from either response. closes #118163 --- .../xpack/application/InferenceUpgradeTestCase.java | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index 58335eb53b366..d38503a884092 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -19,7 +19,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Objects; import static org.elasticsearch.core.Strings.format; @@ -112,13 +111,10 @@ protected void put(String inferenceId, String modelConfig, TaskType taskType) th @SuppressWarnings("unchecked") // in version 8.15, there was a breaking change where "models" was renamed to "endpoints" LinkedList> getConfigsWithBreakingChangeHandling(TaskType testTaskType, String oldClusterId) throws IOException { - + var response = get(testTaskType, oldClusterId); LinkedList> configs; - configs = new LinkedList<>( - (List>) Objects.requireNonNullElse((get(testTaskType, oldClusterId).get("endpoints")), List.of()) - ); - configs.addAll(Objects.requireNonNullElse((List>) get(testTaskType, oldClusterId).get("models"), List.of())); - + configs = new LinkedList<>((List>) response.getOrDefault("endpoints", List.of())); + configs.addAll((List>) response.getOrDefault("models", List.of())); return configs; } } From 
21b7afd6692105fc8f79b9c02ef4999c6e9a0b18 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:13:52 +0000 Subject: [PATCH 16/32] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 1b51ff6 (#117902) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Rene Groeschke --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 0d7bcea168df8..d54eb798ce783 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:32f06b169bb4b0f257fbb10e8c8379f06d3ee1355c89b3327cb623781a29590e", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:1b51ff6dba78c98d3e02b0cd64a8ce3238c7a40408d21e3af12a329d44db6f23", "-wolfi", "apk" ), From 528593b55f26005fec910bfb89f9468ec7798d1f Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:00:29 +0100 Subject: [PATCH 17/32] [DOCS] Link to Elastic Rerank model landing page (#118574) - Add link to Python notebook - Fix heading level --- docs/reference/inference/service-elasticsearch.asciidoc | 7 ++++++- docs/reference/search/retriever.asciidoc | 7 ++++++- .../search/search-your-data/retrievers-examples.asciidoc | 3 +-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git 
a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index cd06e6d7b2f64..bf7e2976bbe63 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -153,7 +153,12 @@ For further details, refer to the {ml-docs}/ml-nlp-elser.html[ELSER model docume [[inference-example-elastic-reranker]] ==== Elastic Rerank via the `elasticsearch` service -The following example shows how to create an {infer} endpoint called `my-elastic-rerank` to perform a `rerank` task type using the built-in Elastic Rerank cross-encoder model. +The following example shows how to create an {infer} endpoint called `my-elastic-rerank` to perform a `rerank` task type using the built-in {ml-docs}/ml-nlp-rerank.html[Elastic Rerank] cross-encoder model. + +[TIP] +==== +Refer to this https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb[Python notebook] for an end-to-end example using Elastic Rerank. +==== The API request below will automatically download the Elastic Rerank model if it isn't already downloaded and then deploy the model. Once deployed, the model can be used for semantic re-ranking with a <>. diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index cb04d4fb6fbf1..f20e9148bf5e7 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -442,7 +442,12 @@ If the child retriever already specifies any filters, then this top-level filter [[text-similarity-reranker-retriever-example-elastic-rerank]] ==== Example: Elastic Rerank -This examples demonstrates how to deploy the Elastic Rerank model and use it to re-rank search results using the `text_similarity_reranker` retriever. 
+[TIP] +==== +Refer to this https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/12-semantic-reranking-elastic-rerank.ipynb[Python notebook] for an end-to-end example using Elastic Rerank. +==== + +This example demonstrates how to deploy the {ml-docs}/ml-nlp-rerank.html[Elastic Rerank] model and use it to re-rank search results using the `text_similarity_reranker` retriever. Follow these steps: diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc index ad1cc32dcee01..5cada8960aeab 100644 --- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc @@ -1,9 +1,8 @@ [[retrievers-examples]] +=== Retrievers examples Learn how to combine different retrievers in these hands-on examples. -=== Retrievers examples - [discrete] [[retrievers-examples-setup]] ==== Add example data From 1e26791515184a965444ea45a300691d17cffb8d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Fri, 13 Dec 2024 09:25:52 -0500 Subject: [PATCH 18/32] Esql bucket function for date nanos (#118474) This adds support for running the bucket function over a date nanos field. Code wise, this just delegates to DateTrunc, which already supports date nanos, so most of the PR is just tests and the auto-generated docs. 
Resolves #118031 --- docs/changelog/118474.yaml | 6 + .../functions/kibana/definition/bucket.json | 306 ++++++++++++++++++ .../esql/functions/types/bucket.asciidoc | 11 + .../src/main/resources/date_nanos.csv-spec | 74 +++++ .../xpack/esql/action/EsqlCapabilities.java | 5 + .../expression/function/grouping/Bucket.java | 12 +- .../function/grouping/BucketTests.java | 91 +++++- 7 files changed, 498 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/118474.yaml diff --git a/docs/changelog/118474.yaml b/docs/changelog/118474.yaml new file mode 100644 index 0000000000000..1b0c6942eb323 --- /dev/null +++ b/docs/changelog/118474.yaml @@ -0,0 +1,6 @@ +pr: 118474 +summary: Esql bucket function for date nanos +area: ES|QL +type: enhancement +issues: + - 118031 diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index 660e1be49fda9..18802f5ff8fef 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -310,6 +310,312 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "date_period", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." 
+ }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "date", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." 
+ }, + { + "name" : "from", + "type" : "keyword", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "date", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "keyword", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." + }, + { + "name" : "from", + "type" : "text", + "optional" : true, + "description" : "Start of the range. Can be a number, a date or a date expressed as a string." + }, + { + "name" : "to", + "type" : "text", + "optional" : true, + "description" : "End of the range. Can be a number, a date or a date expressed as a string." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Numeric or date expression from which to derive buckets." + }, + { + "name" : "buckets", + "type" : "time_duration", + "optional" : false, + "description" : "Target number of buckets, or desired bucket size if `from` and `to` parameters are omitted." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/types/bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc index 172e84b6f7860..2e6985e6bc4ed 100644 --- a/docs/reference/esql/functions/types/bucket.asciidoc +++ b/docs/reference/esql/functions/types/bucket.asciidoc @@ -16,6 +16,17 @@ date | integer | text | date | date date | integer | text | keyword | date date | integer | text | text | date date | time_duration | | | date +date_nanos | date_period | | | date_nanos +date_nanos | integer | date | date | date_nanos +date_nanos | integer | date | keyword | date_nanos +date_nanos | integer | date | text | date_nanos +date_nanos | integer | keyword | date | date_nanos +date_nanos | integer | keyword | keyword | date_nanos +date_nanos | integer | keyword | text | date_nanos +date_nanos | integer | text | date | date_nanos +date_nanos | integer | text | keyword | date_nanos +date_nanos | integer | text | text | date_nanos +date_nanos | time_duration | | | date_nanos double | double | | | double double | integer | double | double | double double | integer | double | integer | double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 0d113c0422562..f4b5c98d596ae 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -549,6 +549,80 @@ yr:date_nanos | mo:date_nanos | mn:date_nanos 2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z ; +Bucket Date nanos by Year +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY yr = BUCKET(nanos, 1 year); + +ct:long | yr:date_nanos +8 | 
2023-01-01T00:00:00.000000000Z +; + +Bucket Date nanos by Year, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY yr = BUCKET(nanos, 5, "1999-01-01", NOW()); + +ct:long | yr:date_nanos +8 | 2023-01-01T00:00:00.000000000Z +; + +Bucket Date nanos by Month +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 1 month); + +ct:long | mo:date_nanos +8 | 2023-10-01T00:00:00.000000000Z +; + +Bucket Date nanos by Month, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 20, "2023-01-01", "2023-12-31"); + +ct:long | mo:date_nanos +8 | 2023-10-01T00:00:00.000000000Z +; + +Bucket Date nanos by Week, range version +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mo = BUCKET(nanos, 55, "2023-01-01", "2023-12-31"); + +ct:long | mo:date_nanos +8 | 2023-10-23T00:00:00.000000000Z +; +Bucket Date nanos by 10 minutes +required_capability: date_trunc_date_nanos +required_capability: date_nanos_bucket + +FROM date_nanos +| WHERE millis > "2020-01-01" +| STATS ct = count(*) BY mn = BUCKET(nanos, 10 minutes); + +ct:long | mn:date_nanos +4 | 2023-10-23T13:50:00.000000000Z +1 | 2023-10-23T13:30:00.000000000Z +1 | 2023-10-23T12:20:00.000000000Z +2 | 2023-10-23T12:10:00.000000000Z +; + Add date nanos required_capability: date_nanos_add_subtract diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 4cf3162fcca3b..649ec1eba9785 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -367,6 +367,11 @@ public enum Cap { */ DATE_TRUNC_DATE_NANOS(), + /** + * Support date nanos values as the field argument to bucket + */ + DATE_NANOS_BUCKET(), + /** * support aggregations on date nanos */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index 9e40b85fd6590..347d542f5212d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -90,7 +90,7 @@ public class Bucket extends GroupingFunction implements Validatable, TwoOptional private final Expression to; @FunctionInfo( - returnType = { "double", "date" }, + returnType = { "double", "date", "date_nanos" }, description = """ Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range.""", @@ -169,7 +169,7 @@ public Bucket( Source source, @Param( name = "field", - type = { "integer", "long", "double", "date" }, + type = { "integer", "long", "double", "date", "date_nanos" }, description = "Numeric or date expression from which to derive buckets." 
) Expression field, @Param( @@ -241,7 +241,7 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - if (field.dataType() == DataType.DATETIME) { + if (field.dataType() == DataType.DATETIME || field.dataType() == DataType.DATE_NANOS) { Rounding.Prepared preparedRounding; if (buckets.dataType().isWholeNumber()) { int b = ((Number) buckets.fold()).intValue(); @@ -314,8 +314,8 @@ private double pickRounding(int buckets, double from, double to) { } // supported parameter type combinations (1st, 2nd, 3rd, 4th): - // datetime, integer, string/datetime, string/datetime - // datetime, rounding/duration, -, - + // datetime/date_nanos, integer, string/datetime, string/datetime + // datetime/date_nanos, rounding/duration, -, - // numeric, integer, numeric, numeric // numeric, numeric, -, - @Override @@ -329,7 +329,7 @@ protected TypeResolution resolveType() { return TypeResolution.TYPE_RESOLVED; } - if (fieldType == DataType.DATETIME) { + if (fieldType == DataType.DATETIME || fieldType == DataType.DATE_NANOS) { TypeResolution resolution = isType( buckets, dt -> dt.isWholeNumber() || DataType.isTemporalAmount(dt), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java index 7e7d91cdf76f4..f01b06c23e8a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java @@ -12,15 +12,19 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.logging.LogManager; import org.elasticsearch.xpack.esql.core.expression.Expression; 
import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; +import org.hamcrest.Matchers; import java.time.Duration; +import java.time.Instant; import java.time.Period; import java.util.ArrayList; import java.util.List; @@ -38,6 +42,7 @@ public BucketTests(@Name("TestCase") Supplier testCas public static Iterable parameters() { List suppliers = new ArrayList<>(); dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + dateNanosCases(suppliers, "fixed date nanos", () -> DateUtils.toLong(Instant.parse("2023-02-17T09:00:00.00Z"))); dateCasesWithSpan( suppliers, "fixed date with period", @@ -54,6 +59,22 @@ public static Iterable parameters() { Duration.ofDays(1L), "[86400000 in Z][fixed]" ); + dateNanosCasesWithSpan( + suppliers, + "fixed date nanos with period", + () -> DateUtils.toLong(Instant.parse("2023-01-01T00:00:00.00Z")), + DataType.DATE_PERIOD, + Period.ofYears(1), + "[YEAR_OF_CENTURY in Z][fixed to midnight]" + ); + dateNanosCasesWithSpan( + suppliers, + "fixed date nanos with duration", + () -> DateUtils.toLong(Instant.parse("2023-02-17T09:00:00.00Z")), + DataType.TIME_DURATION, + Duration.ofDays(1L), + "[86400000 in Z][fixed]" + ); numberCases(suppliers, "fixed long", DataType.LONG, () -> 100L); numberCasesWithSpan(suppliers, "fixed long with span", DataType.LONG, () -> 100L); numberCases(suppliers, "fixed int", DataType.INTEGER, () -> 100); @@ -142,6 +163,62 @@ private static void dateCasesWithSpan( })); } + private static void dateNanosCasesWithSpan( + List suppliers, + String name, + LongSupplier date, + DataType spanType, + Object span, + String spanStr + ) { + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, spanType), () 
-> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); + return new TestCaseSupplier.TestCase( + args, + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + resultsMatcher(args) + ); + })); + } + + private static void dateNanosCases(List suppliers, String name, LongSupplier date) { + for (DataType fromType : DATE_BOUNDS_TYPE) { + for (DataType toType : DATE_BOUNDS_TYPE) { + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, DataType.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + // TODO more "from" and "to" and "buckets" + args.add(new TestCaseSupplier.TypedData(50, DataType.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-01T00:00:00.00Z")); + args.add(dateBound("to", toType, "2023-03-01T09:00:00.00Z")); + return new TestCaseSupplier.TestCase( + args, + Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + resultsMatcher(args) + ); + })); + // same as above, but a low bucket count and datetime bounds that match it (at hour span) + suppliers.add(new TestCaseSupplier(name, List.of(DataType.DATE_NANOS, DataType.INTEGER, fromType, toType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataType.DATE_NANOS, "field")); + args.add(new TestCaseSupplier.TypedData(4, DataType.INTEGER, "buckets").forceLiteral()); + args.add(dateBound("from", fromType, "2023-02-17T09:00:00Z")); + args.add(dateBound("to", toType, "2023-02-17T12:00:00Z")); + return new TestCaseSupplier.TestCase( + args, + 
Matchers.startsWith("DateTruncDateNanosEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding["), + DataType.DATE_NANOS, + equalTo(Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).build().prepareForUnknown().round(date.getAsLong())) + ); + })); + } + } + } + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataType.INTEGER, DataType.LONG, DataType.DOUBLE }; private static void numberCases(List suppliers, String name, DataType numberType, Supplier number) { @@ -221,7 +298,19 @@ private static TestCaseSupplier.TypedData keywordDateLiteral(String name, DataTy private static Matcher resultsMatcher(List typedData) { if (typedData.get(0).type() == DataType.DATETIME) { long millis = ((Number) typedData.get(0).data()).longValue(); - return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis)); + long expected = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis); + LogManager.getLogger(getTestClass()).info("Expected: " + Instant.ofEpochMilli(expected)); + LogManager.getLogger(getTestClass()).info("Input: " + Instant.ofEpochMilli(millis)); + return equalTo(expected); + } + if (typedData.get(0).type() == DataType.DATE_NANOS) { + long nanos = ((Number) typedData.get(0).data()).longValue(); + long expected = DateUtils.toNanoSeconds( + Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(DateUtils.toMilliSeconds(nanos)) + ); + LogManager.getLogger(getTestClass()).info("Expected: " + DateUtils.toInstant(expected)); + LogManager.getLogger(getTestClass()).info("Input: " + DateUtils.toInstant(nanos)); + return equalTo(expected); } return equalTo(((Number) typedData.get(0).data()).doubleValue()); } From a583a38518f321d6092637e66151fc0bd1fe4d1d Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Fri, 13 Dec 2024 15:38:52 +0100 Subject: [PATCH 19/32] fix typo in muted CSV test for scoring in ES|QL (#118665) --- muted-tests.yml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/muted-tests.yml b/muted-tests.yml index d5b933b96d73b..36dfc306b0147 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -174,7 +174,7 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 -- class: "org.elasticsearch.xpack.esql.qa.mixed.MultilusterEsqlSpecIT" +- class: "org.elasticsearch.xpack.esql.qa.mixed.MultiClusterEsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/118460 - class: "org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT" From 44a231acf2471f39497b7740ddc349306c97cd6a Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:53:33 -0500 Subject: [PATCH 20/32] [Inference API] Replace ElasticsearchTimeoutException with ElasticsearchStatusException (#118618) * Replace ElasticsearchTimeoutException with ElasticsearchStatusException with 408 status to avoid causing 503s --- .../xpack/inference/external/http/sender/RequestTask.java | 8 ++++++-- .../external/http/sender/HttpRequestSenderTests.java | 8 +++++--- .../external/http/sender/RequestExecutorServiceTests.java | 5 +++-- .../inference/external/http/sender/RequestTaskTests.java | 5 +++-- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 9ccb93a0858ae..e5c29adeb9176 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -7,13 +7,14 @@ package
org.elasticsearch.xpack.inference.external.http.sender; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ListenerTimeouts; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.util.Objects; @@ -64,7 +65,10 @@ private ActionListener getListener( threadPool.executor(UTILITY_THREAD_POOL_NAME), notificationListener, (ignored) -> notificationListener.onFailure( - new ElasticsearchTimeoutException(Strings.format("Request timed out waiting to be sent after [%s]", timeout)) + new ElasticsearchStatusException( + Strings.format("Request timed out waiting to be sent after [%s]", timeout), + RestStatus.REQUEST_TIMEOUT + ) ) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index 79f6aa8164b75..b3e7db6009204 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.inference.external.http.sender; import org.apache.http.HttpHeaders; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -162,12 +162,13 @@ public void 
testHttpRequestSender_Throws_WhenATimeoutOccurs() throws Exception { PlainActionFuture listener = new PlainActionFuture<>(); sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } } @@ -187,12 +188,13 @@ public void testHttpRequestSenderWithTimeout_Throws_WhenATimeoutOccurs() throws PlainActionFuture listener = new PlainActionFuture<>(); sender.send(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index e09e4968571e5..7e29fad56812d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -8,7 +8,7 @@ package 
org.elasticsearch.xpack.inference.external.http.sender; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; @@ -238,12 +238,13 @@ public void testExecute_CallsOnFailure_WhenRequestTimesOut() { var listener = new PlainActionFuture(); service.execute(RequestManagerTests.createMock(), new DocumentsOnlyInput(List.of()), TimeValue.timeValueNanos(1), listener); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueNanos(1))) ); + assertThat(thrownException.status().getStatus(), is(408)); } public void testExecute_PreservesThreadContext() throws InterruptedException, ExecutionException, TimeoutException { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index c839c266e9320..e37a1a213569e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.inference.external.http.sender; -import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import 
org.elasticsearch.core.TimeValue; @@ -86,13 +86,14 @@ public void testRequest_ReturnsTimeoutException() { listener ); - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); assertThat( thrownException.getMessage(), is(format("Request timed out waiting to be sent after [%s]", TimeValue.timeValueMillis(1))) ); assertTrue(requestTask.hasCompleted()); assertTrue(requestTask.getRequestCompletedFunction().get()); + assertThat(thrownException.status().getStatus(), is(408)); } public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exception { From 1bad1cf6b2256f594361059a8090d2707aa54001 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 13 Dec 2024 16:21:42 +0100 Subject: [PATCH 21/32] Implementing the correct exit functions (Runtime) (#118657) --- .../entitlement/bridge/EntitlementChecker.java | 6 +++++- .../qa/common/RestEntitlementsCheckAction.java | 17 ++++++++++++----- .../api/ElasticsearchEntitlementChecker.java | 7 ++++++- .../Java23ElasticsearchEntitlementChecker.java | 4 ++-- 4 files changed, 25 insertions(+), 9 deletions(-) diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index ad0f14bcf4478..a6b8a31fc3894 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -13,7 +13,11 @@ import java.net.URLStreamHandlerFactory; public interface EntitlementChecker { - void check$java_lang_System$exit(Class callerClass, int status); + + // Exit the JVM process + void check$$exit(Class callerClass, Runtime runtime, int status); + + void 
check$$halt(Class callerClass, Runtime runtime, int status); // URLClassLoader ctor void check$java_net_URLClassLoader$(Class callerClass, URL[] urls); diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java index e63fa4f3b726b..1ac4a7506eacb 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java @@ -47,14 +47,21 @@ static CheckAction serverAndPlugin(Runnable action) { } private static final Map checkActions = Map.ofEntries( - entry("system_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::systemExit)), + entry("runtime_exit", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeExit)), + entry("runtime_halt", CheckAction.serverOnly(RestEntitlementsCheckAction::runtimeHalt)), entry("create_classloader", CheckAction.serverAndPlugin(RestEntitlementsCheckAction::createClassLoader)) ); - @SuppressForbidden(reason = "Specifically testing System.exit") - private static void systemExit() { - logger.info("Calling System.exit(123);"); - System.exit(123); + @SuppressForbidden(reason = "Specifically testing Runtime.exit") + private static void runtimeExit() { + logger.info("Calling Runtime.exit;"); + Runtime.getRuntime().exit(123); + } + + @SuppressForbidden(reason = "Specifically testing Runtime.halt") + private static void runtimeHalt() { + logger.info("Calling Runtime.halt;"); + Runtime.getRuntime().halt(123); } private static void createClassLoader() { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java 
index aa63b630ed7cd..a5ca0543ad15a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -28,7 +28,12 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_lang_System$exit(Class callerClass, int status) { + public void check$$exit(Class callerClass, Runtime runtime, int status) { + policyManager.checkExitVM(callerClass); + } + + @Override + public void check$$halt(Class callerClass, Runtime runtime, int status) { policyManager.checkExitVM(callerClass); } diff --git a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java index d0f9f4f48609c..912d76ecfc01a 100644 --- a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java @@ -19,8 +19,8 @@ public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) { } @Override - public void check$java_lang_System$exit(Class callerClass, int status) { + public void check$$exit(Class callerClass, Runtime runtime, int status) { // TODO: this is just an example, we shouldn't really override a method implemented in the superclass - super.check$java_lang_System$exit(callerClass, status); + super.check$$exit(callerClass, runtime, status); } } From 5411b93d493ddc81682b49cf6cb9bac2607c4f2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 13 Dec 2024 16:24:54 +0100 Subject: [PATCH 22/32] Entitlements tools: public callers finder (#116257) * WIP: Tool to find all public caller from a starting 
list of (JDK) methods. * Add public-callers-finder tool, extract common stuff to common module * Adjustments to visibility/functions and classes and modules to print out * Spotless * Missing gradle configuration * Add details in README as requested in PR * Update ASM version * Including protected methods --- .../entitlement/tools/ExternalAccess.java | 68 ++++++ .../entitlement/tools/Utils.java | 37 +++- .../tools/public-callers-finder/README.md | 50 +++++ .../tools/public-callers-finder/build.gradle | 61 ++++++ .../licenses/asm-LICENSE.txt | 26 +++ .../licenses/asm-NOTICE.txt | 1 + .../FindUsagesClassVisitor.java | 141 +++++++++++++ .../tools/publiccallersfinder/Main.java | 197 ++++++++++++++++++ .../{src => }/README.md | 0 .../tools/securitymanager/scanner/Main.java | 50 ++--- .../scanner/SecurityCheckClassVisitor.java | 22 +- 11 files changed, 603 insertions(+), 50 deletions(-) create mode 100644 libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java create mode 100644 libs/entitlement/tools/public-callers-finder/README.md create mode 100644 libs/entitlement/tools/public-callers-finder/build.gradle create mode 100644 libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt create mode 100644 libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt create mode 100644 libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java create mode 100644 libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java rename libs/entitlement/tools/securitymanager-scanner/{src => }/README.md (100%) diff --git a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java new file mode 100644 index 0000000000000..cd049a91fa4da 
--- /dev/null +++ b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.tools; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.stream.Collectors; + +public enum ExternalAccess { + PUBLIC_CLASS, + PUBLIC_METHOD, + PROTECTED_METHOD; + + private static final String DELIMITER = ":"; + + public static String toString(EnumSet externalAccesses) { + return externalAccesses.stream().map(Enum::toString).collect(Collectors.joining(DELIMITER)); + } + + public static EnumSet fromPermissions( + boolean packageExported, + boolean publicClass, + boolean publicMethod, + boolean protectedMethod + ) { + if (publicMethod && protectedMethod) { + throw new IllegalArgumentException(); + } + + EnumSet externalAccesses = EnumSet.noneOf(ExternalAccess.class); + if (publicMethod) { + externalAccesses.add(ExternalAccess.PUBLIC_METHOD); + } else if (protectedMethod) { + externalAccesses.add(ExternalAccess.PROTECTED_METHOD); + } + + if (packageExported && publicClass) { + externalAccesses.add(ExternalAccess.PUBLIC_CLASS); + } + return externalAccesses; + } + + public static boolean isExternallyAccessible(EnumSet access) { + return access.contains(ExternalAccess.PUBLIC_CLASS) + && (access.contains(ExternalAccess.PUBLIC_METHOD) || access.contains(ExternalAccess.PROTECTED_METHOD)); + } + + public static EnumSet fromString(String accessAsString) { + if ("PUBLIC".equals(accessAsString)) { + return 
EnumSet.of(ExternalAccess.PUBLIC_CLASS, ExternalAccess.PUBLIC_METHOD); + } + if ("PUBLIC-METHOD".equals(accessAsString)) { + return EnumSet.of(ExternalAccess.PUBLIC_METHOD); + } + if ("PRIVATE".equals(accessAsString)) { + return EnumSet.noneOf(ExternalAccess.class); + } + + return EnumSet.copyOf(Arrays.stream(accessAsString.split(DELIMITER)).map(ExternalAccess::valueOf).toList()); + } +} diff --git a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java index c72e550a529cd..c6a71f55db4c6 100644 --- a/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java +++ b/libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/Utils.java @@ -11,16 +11,28 @@ import java.io.IOException; import java.lang.module.ModuleDescriptor; +import java.net.URI; import java.nio.file.FileSystem; +import java.nio.file.FileSystems; import java.nio.file.Files; +import java.nio.file.Path; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; public class Utils { - public static Map> findModuleExports(FileSystem fs) throws IOException { + private static final Set EXCLUDED_MODULES = Set.of( + "java.desktop", + "jdk.jartool", + "jdk.jdi", + "java.security.jgss", + "jdk.jshell" + ); + + private static Map> findModuleExports(FileSystem fs) throws IOException { var modulesExports = new HashMap>(); try (var stream = Files.walk(fs.getPath("modules"))) { stream.filter(p -> p.getFileName().toString().equals("module-info.class")).forEach(x -> { @@ -42,4 +54,27 @@ public static Map> findModuleExports(FileSystem fs) throws I return modulesExports; } + public interface JdkModuleConsumer { + void accept(String moduleName, List moduleClasses, Set moduleExports); + } + + public static void walkJdkModules(JdkModuleConsumer c) throws IOException { + + 
FileSystem fs = FileSystems.getFileSystem(URI.create("jrt:/")); + + var moduleExports = Utils.findModuleExports(fs); + + try (var stream = Files.walk(fs.getPath("modules"))) { + var modules = stream.filter(x -> x.toString().endsWith(".class")) + .collect(Collectors.groupingBy(x -> x.subpath(1, 2).toString())); + + for (var kv : modules.entrySet()) { + var moduleName = kv.getKey(); + if (Utils.EXCLUDED_MODULES.contains(moduleName) == false) { + var thisModuleExports = moduleExports.get(moduleName); + c.accept(moduleName, kv.getValue(), thisModuleExports); + } + } + } + } } diff --git a/libs/entitlement/tools/public-callers-finder/README.md b/libs/entitlement/tools/public-callers-finder/README.md new file mode 100644 index 0000000000000..794576b3409a8 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/README.md @@ -0,0 +1,50 @@ +This tool scans the JDK on which it is running. It takes a list of methods (compatible with the output of the `securitymanager-scanner` tool), and looks for the "public surface" of these methods (i.e. any class/method accessible from regular Java code that calls into the original list, directly or transitively). + +It acts basically as a recursive "Find Usages" in Intellij, stopping at the first fully accessible point (public method on a public class). +The tool scans every method in every class inside the same java module; e.g. +if you have a private method `File#normalizedList`, it will scan `java.base` to find +public methods like `File#list(String)`, `File#list(FilenameFilter, String)` and +`File#listFiles(File)`. + +The tool considers implemented interfaces (directly); e.g. if we're looking at a +method `C.m`, where `C implements I`, it will look for calls to `I.m`. It will +also consider (indirectly) calls to `S.m` (where `S` is a supertype of `C`), as +it treats calls to `super` in `S.m` as regular calls (e.g. `example() -> S.m() -> C.m()`). 
+ + +In order to run the tool, use: +```shell +./gradlew :libs:entitlement:tools:public-callers-finder:run [] +``` +Where `input-file` is a CSV file (columns separated by `TAB`) that contains the following columns: +Module name +1. unused +2. unused +3. unused +4. Fully qualified class name (ASM style, with `/` separators) +5. Method name +6. Method descriptor (ASM signature) +7. Visibility (PUBLIC/PUBLIC-METHOD/PRIVATE) + +And `bubble-up-from-public` is a boolean (`true|false`) indicating if the code should stop at the first public method (`false`: default, recommended) or continue to find usages recursively even after reaching the "public surface". + +The output of the tool is another CSV file, with one line for each entry-point, columns separated by `TAB` + +1. Module name +2. File name (from source root) +3. Line number +4. Fully qualified class name (ASM style, with `/` separators) +5. Method name +6. Method descriptor (ASM signature) +7. Visibility (PUBLIC/PUBLIC-METHOD/PRIVATE) +8. Original caller Module name +9. Original caller Class name (ASM style, with `/` separators) +10. Original caller Method name +11. 
Original caller Visibility + +Examples: +``` +java.base DeleteOnExitHook.java 50 java/io/DeleteOnExitHook$1 run ()V PUBLIC java.base java/io/File delete PUBLIC +java.base ZipFile.java 254 java/util/zip/ZipFile (Ljava/io/File;ILjava/nio/charset/Charset;)V PUBLIC java.base java/io/File delete PUBLIC +java.logging FileHandler.java 279 java/util/logging/FileHandler ()V PUBLIC java.base java/io/File delete PUBLIC +``` diff --git a/libs/entitlement/tools/public-callers-finder/build.gradle b/libs/entitlement/tools/public-callers-finder/build.gradle new file mode 100644 index 0000000000000..083b1a43b9794 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/build.gradle @@ -0,0 +1,61 @@ +plugins { + id 'application' +} + +apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.publish' + +tasks.named("dependencyLicenses").configure { + mapping from: /asm-.*/, to: 'asm' +} + +group = 'org.elasticsearch.entitlement.tools' + +ext { + javaMainClass = "org.elasticsearch.entitlement.tools.publiccallersfinder.Main" +} + +application { + mainClass.set(javaMainClass) + applicationDefaultJvmArgs = [ + '--add-exports', 'java.base/sun.security.util=ALL-UNNAMED', + '--add-opens', 'java.base/java.lang=ALL-UNNAMED', + '--add-opens', 'java.base/java.net=ALL-UNNAMED', + '--add-opens', 'java.base/java.net.spi=ALL-UNNAMED', + '--add-opens', 'java.base/java.util.concurrent=ALL-UNNAMED', + '--add-opens', 'java.base/javax.crypto=ALL-UNNAMED', + '--add-opens', 'java.base/javax.security.auth=ALL-UNNAMED', + '--add-opens', 'java.base/jdk.internal.logger=ALL-UNNAMED', + '--add-opens', 'java.base/sun.nio.ch=ALL-UNNAMED', + '--add-opens', 'jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED', + '--add-opens', 'java.logging/java.util.logging=ALL-UNNAMED', + '--add-opens', 'java.logging/sun.util.logging.internal=ALL-UNNAMED', + '--add-opens', 'java.naming/javax.naming.ldap.spi=ALL-UNNAMED', + '--add-opens', 'java.rmi/sun.rmi.runtime=ALL-UNNAMED', + '--add-opens', 
'jdk.dynalink/jdk.dynalink=ALL-UNNAMED', + '--add-opens', 'jdk.dynalink/jdk.dynalink.linker=ALL-UNNAMED', + '--add-opens', 'java.desktop/sun.awt=ALL-UNNAMED', + '--add-opens', 'java.sql.rowset/javax.sql.rowset.spi=ALL-UNNAMED', + '--add-opens', 'java.sql/java.sql=ALL-UNNAMED', + '--add-opens', 'java.xml.crypto/com.sun.org.apache.xml.internal.security.utils=ALL-UNNAMED' + ] +} + +repositories { + mavenCentral() +} + +dependencies { + compileOnly(project(':libs:core')) + implementation 'org.ow2.asm:asm:9.7.1' + implementation 'org.ow2.asm:asm-util:9.7.1' + implementation(project(':libs:entitlement:tools:common')) +} + +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} + +tasks.named("thirdPartyAudit").configure { + ignoreMissingClasses() +} diff --git a/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt b/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt new file mode 100644 index 0000000000000..afb064f2f2666 --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/licenses/asm-LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2012 France Télécom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt b/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt new file mode 100644 index 0000000000000..8d1c8b69c3fce --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/licenses/asm-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java new file mode 100644 index 0000000000000..6f136d0977e3f --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/FindUsagesClassVisitor.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.tools.publiccallersfinder; + +import org.elasticsearch.entitlement.tools.ExternalAccess; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Label; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Type; + +import java.lang.constant.ClassDesc; +import java.lang.reflect.AccessFlag; +import java.util.EnumSet; +import java.util.Set; + +import static org.objectweb.asm.Opcodes.ACC_PROTECTED; +import static org.objectweb.asm.Opcodes.ACC_PUBLIC; +import static org.objectweb.asm.Opcodes.ASM9; + +class FindUsagesClassVisitor extends ClassVisitor { + + private int classAccess; + private boolean accessibleViaInterfaces; + + record MethodDescriptor(String className, String methodName, String methodDescriptor) {} + + record EntryPoint( + String moduleName, + String source, + int line, + String className, + String methodName, + String methodDescriptor, + EnumSet access + ) {} + + interface CallerConsumer { + void accept(String source, int line, String className, String methodName, String methodDescriptor, EnumSet access); + } + + private final Set moduleExports; + private final MethodDescriptor methodToFind; + private final CallerConsumer callers; + private String className; + private String source; + + protected FindUsagesClassVisitor(Set moduleExports, MethodDescriptor methodToFind, CallerConsumer callers) { + super(ASM9); + this.moduleExports = moduleExports; + this.methodToFind = methodToFind; + this.callers = callers; + } + + @Override + public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { + 
super.visit(version, access, name, signature, superName, interfaces); + this.className = name; + this.classAccess = access; + if (interfaces.length > 0) { + this.accessibleViaInterfaces = findAccessibility(interfaces, moduleExports); + } + } + + private static boolean findAccessibility(String[] interfaces, Set moduleExports) { + var accessibleViaInterfaces = false; + for (var interfaceName : interfaces) { + if (moduleExports.contains(getPackageName(interfaceName))) { + var interfaceType = Type.getObjectType(interfaceName); + try { + var clazz = Class.forName(interfaceType.getClassName()); + if (clazz.accessFlags().contains(AccessFlag.PUBLIC)) { + accessibleViaInterfaces = true; + } + } catch (ClassNotFoundException ignored) {} + } + } + return accessibleViaInterfaces; + } + + @Override + public void visitSource(String source, String debug) { + super.visitSource(source, debug); + this.source = source; + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + return new FindUsagesMethodVisitor(super.visitMethod(access, name, descriptor, signature, exceptions), name, descriptor, access); + } + + private static String getPackageName(String className) { + return ClassDesc.ofInternalName(className).packageName(); + } + + private class FindUsagesMethodVisitor extends MethodVisitor { + + private final String methodName; + private int line; + private final String methodDescriptor; + private final int methodAccess; + + protected FindUsagesMethodVisitor(MethodVisitor mv, String methodName, String methodDescriptor, int methodAccess) { + super(ASM9, mv); + this.methodName = methodName; + this.methodDescriptor = methodDescriptor; + this.methodAccess = methodAccess; + } + + @Override + public void visitMethodInsn(int opcode, String owner, String name, String descriptor, boolean isInterface) { + super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); + + if 
(methodToFind.className.equals(owner)) { + if (methodToFind.methodName.equals(name)) { + if (methodToFind.methodDescriptor == null || methodToFind.methodDescriptor.equals(descriptor)) { + EnumSet externalAccess = ExternalAccess.fromPermissions( + moduleExports.contains(getPackageName(className)), + accessibleViaInterfaces || (classAccess & ACC_PUBLIC) != 0, + (methodAccess & ACC_PUBLIC) != 0, + (methodAccess & ACC_PROTECTED) != 0 + ); + callers.accept(source, line, className, methodName, methodDescriptor, externalAccess); + } + } + } + } + + @Override + public void visitLineNumber(int line, Label start) { + super.visitLineNumber(line, start); + this.line = line; + } + } +} diff --git a/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java new file mode 100644 index 0000000000000..60b3a3c9f3c8e --- /dev/null +++ b/libs/entitlement/tools/public-callers-finder/src/main/java/org/elasticsearch/entitlement/tools/publiccallersfinder/Main.java @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.tools.publiccallersfinder; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.tools.ExternalAccess; +import org.elasticsearch.entitlement.tools.Utils; +import org.objectweb.asm.ClassReader; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class Main { + + private static final String SEPARATOR = "\t"; + + record CallChain(FindUsagesClassVisitor.EntryPoint entryPoint, CallChain next) {} + + interface UsageConsumer { + void usageFound(CallChain originalEntryPoint, CallChain newMethod); + } + + private static void findTransitiveUsages( + Collection firstLevelCallers, + List classesToScan, + Set moduleExports, + boolean bubbleUpFromPublic, + UsageConsumer usageConsumer + ) { + for (var caller : firstLevelCallers) { + var methodsToCheck = new ArrayDeque<>(Set.of(caller)); + var methodsSeen = new HashSet(); + + while (methodsToCheck.isEmpty() == false) { + var methodToCheck = methodsToCheck.removeFirst(); + var m = methodToCheck.entryPoint(); + var visitor2 = new FindUsagesClassVisitor( + moduleExports, + new FindUsagesClassVisitor.MethodDescriptor(m.className(), m.methodName(), m.methodDescriptor()), + (source, line, className, methodName, methodDescriptor, access) -> { + var newMethod = new CallChain( + new FindUsagesClassVisitor.EntryPoint( + m.moduleName(), + source, + line, + className, + methodName, + methodDescriptor, + access + ), + methodToCheck + ); + + var notSeenBefore = methodsSeen.add(newMethod.entryPoint()); + if (notSeenBefore) { + if (ExternalAccess.isExternallyAccessible(access)) { + usageConsumer.usageFound(caller.next(), newMethod); + } + if (access.contains(ExternalAccess.PUBLIC_METHOD) == false || bubbleUpFromPublic) { + 
methodsToCheck.add(newMethod); + } + } + } + ); + + for (var classFile : classesToScan) { + try { + ClassReader cr = new ClassReader(Files.newInputStream(classFile)); + cr.accept(visitor2, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + } + } + + private static void identifyTopLevelEntryPoints( + FindUsagesClassVisitor.MethodDescriptor methodToFind, + String methodToFindModule, + EnumSet methodToFindAccess, + boolean bubbleUpFromPublic + ) throws IOException { + + Utils.walkJdkModules((moduleName, moduleClasses, moduleExports) -> { + var originalCallers = new ArrayList(); + var visitor = new FindUsagesClassVisitor( + moduleExports, + methodToFind, + (source, line, className, methodName, methodDescriptor, access) -> originalCallers.add( + new CallChain( + new FindUsagesClassVisitor.EntryPoint(moduleName, source, line, className, methodName, methodDescriptor, access), + new CallChain( + new FindUsagesClassVisitor.EntryPoint( + methodToFindModule, + "", + 0, + methodToFind.className(), + methodToFind.methodName(), + methodToFind.methodDescriptor(), + methodToFindAccess + ), + null + ) + ) + ) + ); + + for (var classFile : moduleClasses) { + try { + ClassReader cr = new ClassReader(Files.newInputStream(classFile)); + cr.accept(visitor, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + originalCallers.stream().filter(c -> ExternalAccess.isExternallyAccessible(c.entryPoint().access())).forEach(c -> { + var originalCaller = c.next(); + printRow(getEntryPointString(c.entryPoint().moduleName(), c.entryPoint()), getOriginalEntryPointString(originalCaller)); + }); + var firstLevelCallers = bubbleUpFromPublic ? 
originalCallers : originalCallers.stream().filter(Main::isNotFullyPublic).toList(); + + if (firstLevelCallers.isEmpty() == false) { + findTransitiveUsages( + firstLevelCallers, + moduleClasses, + moduleExports, + bubbleUpFromPublic, + (originalEntryPoint, newMethod) -> printRow( + getEntryPointString(moduleName, newMethod.entryPoint()), + getOriginalEntryPointString(originalEntryPoint) + ) + ); + } + }); + } + + private static boolean isNotFullyPublic(CallChain c) { + return (c.entryPoint().access().contains(ExternalAccess.PUBLIC_CLASS) + && c.entryPoint().access().contains(ExternalAccess.PUBLIC_METHOD)) == false; + } + + @SuppressForbidden(reason = "This tool prints the CSV to stdout") + private static void printRow(String entryPointString, String originalEntryPoint) { + System.out.println(entryPointString + SEPARATOR + originalEntryPoint); + } + + private static String getEntryPointString(String moduleName, FindUsagesClassVisitor.EntryPoint e) { + return moduleName + SEPARATOR + e.source() + SEPARATOR + e.line() + SEPARATOR + e.className() + SEPARATOR + e.methodName() + + SEPARATOR + e.methodDescriptor() + SEPARATOR + ExternalAccess.toString(e.access()); + } + + private static String getOriginalEntryPointString(CallChain originalCallChain) { + return originalCallChain.entryPoint().moduleName() + SEPARATOR + originalCallChain.entryPoint().className() + SEPARATOR + + originalCallChain.entryPoint().methodName() + SEPARATOR + ExternalAccess.toString(originalCallChain.entryPoint().access()); + } + + interface MethodDescriptorConsumer { + void accept(FindUsagesClassVisitor.MethodDescriptor methodDescriptor, String moduleName, EnumSet access) + throws IOException; + } + + private static void parseCsv(Path csvPath, MethodDescriptorConsumer methodConsumer) throws IOException { + var lines = Files.readAllLines(csvPath); + for (var l : lines) { + var tokens = l.split(SEPARATOR); + var moduleName = tokens[0]; + var className = tokens[3]; + var methodName = tokens[4]; + var 
methodDescriptor = tokens[5]; + var access = ExternalAccess.fromString(tokens[6]); + methodConsumer.accept(new FindUsagesClassVisitor.MethodDescriptor(className, methodName, methodDescriptor), moduleName, access); + } + } + + public static void main(String[] args) throws IOException { + var csvFilePath = Path.of(args[0]); + boolean bubbleUpFromPublic = args.length >= 2 && Boolean.parseBoolean(args[1]); + parseCsv(csvFilePath, (method, module, access) -> identifyTopLevelEntryPoints(method, module, access, bubbleUpFromPublic)); + } +} diff --git a/libs/entitlement/tools/securitymanager-scanner/src/README.md b/libs/entitlement/tools/securitymanager-scanner/README.md similarity index 100% rename from libs/entitlement/tools/securitymanager-scanner/src/README.md rename to libs/entitlement/tools/securitymanager-scanner/README.md diff --git a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java index bea49e0296e67..7c2dd69d60f0c 100644 --- a/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java +++ b/libs/entitlement/tools/securitymanager-scanner/src/main/java/org/elasticsearch/entitlement/tools/securitymanager/scanner/Main.java @@ -10,47 +10,35 @@ package org.elasticsearch.entitlement.tools.securitymanager.scanner; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.tools.ExternalAccess; import org.elasticsearch.entitlement.tools.Utils; import org.objectweb.asm.ClassReader; import java.io.IOException; -import java.net.URI; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; import java.nio.file.Files; import java.util.HashMap; import java.util.List; -import java.util.Set; public class Main { - static final Set excludedModules = 
Set.of("java.desktop"); - private static void identifySMChecksEntryPoints() throws IOException { - FileSystem fs = FileSystems.getFileSystem(URI.create("jrt:/")); - - var moduleExports = Utils.findModuleExports(fs); - var callers = new HashMap>(); var visitor = new SecurityCheckClassVisitor(callers); - try (var stream = Files.walk(fs.getPath("modules"))) { - stream.filter(x -> x.toString().endsWith(".class")).forEach(x -> { - var moduleName = x.subpath(1, 2).toString(); - if (excludedModules.contains(moduleName) == false) { - try { - ClassReader cr = new ClassReader(Files.newInputStream(x)); - visitor.setCurrentModule(moduleName, moduleExports.get(moduleName)); - var path = x.getNameCount() > 3 ? x.subpath(2, x.getNameCount() - 1).toString() : ""; - visitor.setCurrentSourcePath(path); - cr.accept(visitor, 0); - } catch (IOException e) { - throw new RuntimeException(e); - } + Utils.walkJdkModules((moduleName, moduleClasses, moduleExports) -> { + for (var classFile : moduleClasses) { + try { + ClassReader cr = new ClassReader(Files.newInputStream(classFile)); + visitor.setCurrentModule(moduleName, moduleExports); + var path = classFile.getNameCount() > 3 ? 
classFile.subpath(2, classFile.getNameCount() - 1).toString() : ""; + visitor.setCurrentSourcePath(path); + cr.accept(visitor, 0); + } catch (IOException e) { + throw new RuntimeException(e); } - }); - } + } + }); printToStdout(callers); } @@ -68,16 +56,8 @@ private static void printToStdout(HashMap excludedClasses = Set.of(SECURITY_MANAGER_INTERNAL_NAME); - enum ExternalAccess { - CLASS, - METHOD - } - record CallerInfo( String moduleName, String source, @@ -208,15 +205,12 @@ public void visitMethodInsn(int opcode, String owner, String name, String descri || opcode == INVOKEDYNAMIC) { if (SECURITY_MANAGER_INTERNAL_NAME.equals(owner)) { - EnumSet externalAccesses = EnumSet.noneOf(ExternalAccess.class); - if (moduleExports.contains(getPackageName(className))) { - if ((methodAccess & ACC_PUBLIC) != 0) { - externalAccesses.add(ExternalAccess.METHOD); - } - if ((classAccess & ACC_PUBLIC) != 0) { - externalAccesses.add(ExternalAccess.CLASS); - } - } + EnumSet externalAccesses = ExternalAccess.fromPermissions( + moduleExports.contains(getPackageName(className)), + (classAccess & ACC_PUBLIC) != 0, + (methodAccess & ACC_PUBLIC) != 0, + (methodAccess & ACC_PROTECTED) != 0 + ); if (name.equals("checkPermission")) { var callers = callerInfoByMethod.computeIfAbsent(name, ignored -> new ArrayList<>()); From 0441555503593cf40fcb04b6edc49da3734a0738 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 13 Dec 2024 16:46:25 +0100 Subject: [PATCH 23/32] ESQL: Disable grok.OverwriteName* on pre-8.13 BWC tests (#118655) This prevents two tests in `grok` and `dissect` suites - `overwriteName` and `overwriteNameWhere` and one in the `stats` suite - `byStringAndLongWithAlias` - to run against pre-8.13.0 versions. Reason being that coordinators prior to that version can generate invalid node plans, that'd fail (verification) on 8.18+ nodes. 
--- .../qa/testFixtures/src/main/resources/dissect.csv-spec | 6 ++++-- .../esql/qa/testFixtures/src/main/resources/grok.csv-spec | 6 ++++-- .../esql/qa/testFixtures/src/main/resources/stats.csv-spec | 3 ++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index 38f09d2e3c56e..cde5427bf37d6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -223,7 +223,8 @@ null | null | null ; -overwriteName +// the query is incorrectly physically plan (fails the verification) in pre-8.13.0 versions +overwriteName#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | dissect full_name "%{emp_no} %{b}" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword @@ -244,7 +245,8 @@ emp_no:integer | first_name:keyword | rest:keyword ; -overwriteNameWhere +// the query is incorrectly physically plan (fails the verification) in pre-8.13.0 versions +overwriteNameWhere#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | dissect full_name "%{emp_no} %{b}" | where emp_no == "Bezalel" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec index 98c88d06caa75..eece1bdfbffa4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec @@ -199,7 +199,8 @@ null | null | null ; -overwriteName +// the query is incorrectly physically plan (fails the verification) in pre-8.13.0 versions +overwriteName#[skip:-8.12.99] from employees | 
sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | grok full_name "%{WORD:emp_no} %{WORD:b}" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword @@ -209,7 +210,8 @@ Parto Bamford | Parto | Bamford ; -overwriteNameWhere +// the query is incorrectly physically plan (fails the verification) in pre-8.13.0 versions +overwriteNameWhere#[skip:-8.12.99] from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | grok full_name "%{WORD:emp_no} %{WORD:b}" | where emp_no == "Bezalel" | keep full_name, emp_no, b | limit 3; full_name:keyword | emp_no:keyword | b:keyword diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index d76f4c05d955f..100c0d716d65c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -564,7 +564,8 @@ c:long | gender:keyword | trunk_worked_seconds:long 0 | null | 200000000 ; -byStringAndLongWithAlias +// the query is incorrectly physically plan (fails the verification) in pre-8.13.0 versions +byStringAndLongWithAlias#[skip:-8.12.99] FROM employees | EVAL trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000 | RENAME gender as g, trunk_worked_seconds as tws From cf7cb4bbb95996136a49340cc72acb850a703b44 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Dec 2024 15:58:16 +0000 Subject: [PATCH 24/32] Bump versions after 8.17.0 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 
deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 6c8b8edfcbac1..6e15d64154960 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 2fbcd075b9719..9619de3c2c98b 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -301,8 +301,8 @@ steps: env: BWC_VERSION: 8.16.2 - - label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0 + - label: "{{matrix.image}} / 8.17.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.1 timeout_in_minutes: 300 matrix: setup: @@ -315,7 +315,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.17.0 + BWC_VERSION: 8.17.1 - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 94c9020a794a2..f2d169cd2b30d 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -325,8 +325,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.17.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.0#bwcTest + - label: 8.17.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.1#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -335,7 +335,7 @@ steps: buildDirectory: /dev/shm/bk 
preemptible: true env: - BWC_VERSION: 8.17.0 + BWC_VERSION: 8.17.1 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.1", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 79de891452117..3cb983373138f 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -16,6 +16,6 @@ BWC_VERSION: - "8.14.3" - "8.15.5" - "8.16.2" - - "8.17.0" + - "8.17.1" - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 5514fc376a285..e05c0774c9819 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - "8.16.2" - - "8.17.0" + - "8.17.1" - "8.18.0" - "9.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index f03505de310d5..47c43eadcfb03 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -192,6 +192,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_17_1 = new Version(8_17_01_99); public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv 
b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index faeb7fe848159..08db0822dfef5 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -135,3 +135,4 @@ 8.15.5,8702003 8.16.0,8772001 8.16.1,8772004 +8.17.0,8797002 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 1fc8bd8648ad6..afe696f31d323 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -135,3 +135,4 @@ 8.15.5,8512000 8.16.0,8518000 8.16.1,8518000 +8.17.0,8521000 From a68269b1763b9ed843d4e778373f38233c426aa5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Dec 2024 15:59:29 +0000 Subject: [PATCH 25/32] Prune changelogs after 8.17.0 release --- docs/changelog/104683.yaml | 5 ----- docs/changelog/112881.yaml | 5 ----- docs/changelog/112989.yaml | 5 ----- docs/changelog/113194.yaml | 5 ----- docs/changelog/113713.yaml | 5 ----- docs/changelog/113920.yaml | 5 ----- docs/changelog/114334.yaml | 7 ------- docs/changelog/114482.yaml | 5 ----- docs/changelog/114484.yaml | 6 ------ docs/changelog/114620.yaml | 5 ----- docs/changelog/114665.yaml | 6 ------ docs/changelog/114681.yaml | 6 ------ docs/changelog/114742.yaml | 5 ----- docs/changelog/114819.yaml | 6 ------ docs/changelog/114855.yaml | 5 ----- docs/changelog/114862.yaml | 5 ----- docs/changelog/114869.yaml | 5 ----- docs/changelog/114899.yaml | 5 ----- docs/changelog/114924.yaml | 5 ----- docs/changelog/114934.yaml | 6 ------ docs/changelog/114964.yaml | 6 ------ docs/changelog/115041.yaml | 6 ------ docs/changelog/115091.yaml | 7 ------- docs/changelog/115102.yaml | 6 ------ docs/changelog/115142.yaml | 6 ------ docs/changelog/115266.yaml | 6 ------ docs/changelog/115359.yaml | 6 ------ docs/changelog/115414.yaml 
| 9 --------- docs/changelog/115585.yaml | 6 ------ docs/changelog/115640.yaml | 6 ------ docs/changelog/115655.yaml | 5 ----- docs/changelog/115678.yaml | 5 ----- docs/changelog/115687.yaml | 5 ----- docs/changelog/115744.yaml | 6 ------ docs/changelog/115792.yaml | 5 ----- docs/changelog/115797.yaml | 6 ------ docs/changelog/115807.yaml | 5 ----- docs/changelog/115812.yaml | 5 ----- docs/changelog/115814.yaml | 6 ------ docs/changelog/115858.yaml | 5 ----- docs/changelog/115994.yaml | 5 ----- docs/changelog/116021.yaml | 6 ------ docs/changelog/116082.yaml | 5 ----- docs/changelog/116128.yaml | 5 ----- docs/changelog/116211.yaml | 5 ----- docs/changelog/116325.yaml | 5 ----- docs/changelog/116346.yaml | 5 ----- docs/changelog/116348.yaml | 5 ----- docs/changelog/116431.yaml | 5 ----- docs/changelog/116437.yaml | 5 ----- docs/changelog/116447.yaml | 5 ----- docs/changelog/116515.yaml | 5 ----- docs/changelog/116583.yaml | 7 ------- docs/changelog/116591.yaml | 5 ----- docs/changelog/116656.yaml | 6 ------ docs/changelog/116664.yaml | 6 ------ docs/changelog/116689.yaml | 10 ---------- docs/changelog/116809.yaml | 5 ----- docs/changelog/116819.yaml | 5 ----- docs/changelog/116931.yaml | 5 ----- docs/changelog/116953.yaml | 6 ------ docs/changelog/116957.yaml | 5 ----- docs/changelog/116962.yaml | 5 ----- docs/changelog/116980.yaml | 6 ------ docs/changelog/117080.yaml | 5 ----- docs/changelog/117105.yaml | 6 ------ docs/changelog/117189.yaml | 5 ----- docs/changelog/117213.yaml | 6 ------ docs/changelog/117271.yaml | 5 ----- docs/changelog/117294.yaml | 5 ----- docs/changelog/117297.yaml | 5 ----- docs/changelog/117312.yaml | 5 ----- docs/changelog/117316.yaml | 5 ----- docs/changelog/117350.yaml | 5 ----- docs/changelog/117404.yaml | 5 ----- docs/changelog/117503.yaml | 6 ------ docs/changelog/117551.yaml | 5 ----- docs/changelog/117575.yaml | 5 ----- docs/changelog/117595.yaml | 5 ----- docs/changelog/117657.yaml | 5 ----- docs/changelog/117762.yaml | 6 ------ 
docs/changelog/117792.yaml | 6 ------ docs/changelog/117842.yaml | 5 ----- docs/changelog/117865.yaml | 5 ----- docs/changelog/117914.yaml | 5 ----- docs/changelog/117920.yaml | 6 ------ docs/changelog/117953.yaml | 5 ----- docs/changelog/118354.yaml | 5 ----- docs/changelog/118370.yaml | 6 ------ docs/changelog/118378.yaml | 5 ----- 90 files changed, 493 deletions(-) delete mode 100644 docs/changelog/104683.yaml delete mode 100644 docs/changelog/112881.yaml delete mode 100644 docs/changelog/112989.yaml delete mode 100644 docs/changelog/113194.yaml delete mode 100644 docs/changelog/113713.yaml delete mode 100644 docs/changelog/113920.yaml delete mode 100644 docs/changelog/114334.yaml delete mode 100644 docs/changelog/114482.yaml delete mode 100644 docs/changelog/114484.yaml delete mode 100644 docs/changelog/114620.yaml delete mode 100644 docs/changelog/114665.yaml delete mode 100644 docs/changelog/114681.yaml delete mode 100644 docs/changelog/114742.yaml delete mode 100644 docs/changelog/114819.yaml delete mode 100644 docs/changelog/114855.yaml delete mode 100644 docs/changelog/114862.yaml delete mode 100644 docs/changelog/114869.yaml delete mode 100644 docs/changelog/114899.yaml delete mode 100644 docs/changelog/114924.yaml delete mode 100644 docs/changelog/114934.yaml delete mode 100644 docs/changelog/114964.yaml delete mode 100644 docs/changelog/115041.yaml delete mode 100644 docs/changelog/115091.yaml delete mode 100644 docs/changelog/115102.yaml delete mode 100644 docs/changelog/115142.yaml delete mode 100644 docs/changelog/115266.yaml delete mode 100644 docs/changelog/115359.yaml delete mode 100644 docs/changelog/115414.yaml delete mode 100644 docs/changelog/115585.yaml delete mode 100644 docs/changelog/115640.yaml delete mode 100644 docs/changelog/115655.yaml delete mode 100644 docs/changelog/115678.yaml delete mode 100644 docs/changelog/115687.yaml delete mode 100644 docs/changelog/115744.yaml delete mode 100644 docs/changelog/115792.yaml delete mode 100644 
docs/changelog/115797.yaml delete mode 100644 docs/changelog/115807.yaml delete mode 100644 docs/changelog/115812.yaml delete mode 100644 docs/changelog/115814.yaml delete mode 100644 docs/changelog/115858.yaml delete mode 100644 docs/changelog/115994.yaml delete mode 100644 docs/changelog/116021.yaml delete mode 100644 docs/changelog/116082.yaml delete mode 100644 docs/changelog/116128.yaml delete mode 100644 docs/changelog/116211.yaml delete mode 100644 docs/changelog/116325.yaml delete mode 100644 docs/changelog/116346.yaml delete mode 100644 docs/changelog/116348.yaml delete mode 100644 docs/changelog/116431.yaml delete mode 100644 docs/changelog/116437.yaml delete mode 100644 docs/changelog/116447.yaml delete mode 100644 docs/changelog/116515.yaml delete mode 100644 docs/changelog/116583.yaml delete mode 100644 docs/changelog/116591.yaml delete mode 100644 docs/changelog/116656.yaml delete mode 100644 docs/changelog/116664.yaml delete mode 100644 docs/changelog/116689.yaml delete mode 100644 docs/changelog/116809.yaml delete mode 100644 docs/changelog/116819.yaml delete mode 100644 docs/changelog/116931.yaml delete mode 100644 docs/changelog/116953.yaml delete mode 100644 docs/changelog/116957.yaml delete mode 100644 docs/changelog/116962.yaml delete mode 100644 docs/changelog/116980.yaml delete mode 100644 docs/changelog/117080.yaml delete mode 100644 docs/changelog/117105.yaml delete mode 100644 docs/changelog/117189.yaml delete mode 100644 docs/changelog/117213.yaml delete mode 100644 docs/changelog/117271.yaml delete mode 100644 docs/changelog/117294.yaml delete mode 100644 docs/changelog/117297.yaml delete mode 100644 docs/changelog/117312.yaml delete mode 100644 docs/changelog/117316.yaml delete mode 100644 docs/changelog/117350.yaml delete mode 100644 docs/changelog/117404.yaml delete mode 100644 docs/changelog/117503.yaml delete mode 100644 docs/changelog/117551.yaml delete mode 100644 docs/changelog/117575.yaml delete mode 100644 
docs/changelog/117595.yaml delete mode 100644 docs/changelog/117657.yaml delete mode 100644 docs/changelog/117762.yaml delete mode 100644 docs/changelog/117792.yaml delete mode 100644 docs/changelog/117842.yaml delete mode 100644 docs/changelog/117865.yaml delete mode 100644 docs/changelog/117914.yaml delete mode 100644 docs/changelog/117920.yaml delete mode 100644 docs/changelog/117953.yaml delete mode 100644 docs/changelog/118354.yaml delete mode 100644 docs/changelog/118370.yaml delete mode 100644 docs/changelog/118378.yaml diff --git a/docs/changelog/104683.yaml b/docs/changelog/104683.yaml deleted file mode 100644 index d4f40b59cfd91..0000000000000 --- a/docs/changelog/104683.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104683 -summary: "Feature: re-structure document ID generation favoring _id inverted index compression" -area: Logs -type: enhancement -issues: [] diff --git a/docs/changelog/112881.yaml b/docs/changelog/112881.yaml deleted file mode 100644 index a8a0d542f8201..0000000000000 --- a/docs/changelog/112881.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 112881 -summary: "ESQL: Remove parent from `FieldAttribute`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml deleted file mode 100644 index 364f012f94420..0000000000000 --- a/docs/changelog/112989.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 112989 -summary: Upgrade Bouncy Castle FIPS dependencies -area: Security -type: upgrade -issues: [] diff --git a/docs/changelog/113194.yaml b/docs/changelog/113194.yaml deleted file mode 100644 index 132659321c65e..0000000000000 --- a/docs/changelog/113194.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113194 -summary: Add Search Phase APM metrics -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/113713.yaml b/docs/changelog/113713.yaml deleted file mode 100644 index c5478c95e464d..0000000000000 --- a/docs/changelog/113713.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113713 -summary: Adding inference 
endpoint validation for `AzureAiStudioService` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/113920.yaml b/docs/changelog/113920.yaml deleted file mode 100644 index 4699ae6d7dd65..0000000000000 --- a/docs/changelog/113920.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 113920 -summary: Add initial support for `semantic_text` field type -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/114334.yaml b/docs/changelog/114334.yaml deleted file mode 100644 index d0fefe40c6970..0000000000000 --- a/docs/changelog/114334.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 114334 -summary: Don't return TEXT type for functions that take TEXT -area: ES|QL -type: bug -issues: - - 111537 - - 114333 diff --git a/docs/changelog/114482.yaml b/docs/changelog/114482.yaml deleted file mode 100644 index a5e2e981f7adc..0000000000000 --- a/docs/changelog/114482.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114482 -summary: Remove snapshot build restriction for match and qstr functions -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/114484.yaml b/docs/changelog/114484.yaml deleted file mode 100644 index 48f54ad0218bb..0000000000000 --- a/docs/changelog/114484.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114484 -summary: Add `docvalue_fields` Support for `dense_vector` Fields -area: Search -type: enhancement -issues: - - 108470 diff --git a/docs/changelog/114620.yaml b/docs/changelog/114620.yaml deleted file mode 100644 index 92498db92061f..0000000000000 --- a/docs/changelog/114620.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114620 -summary: "ES|QL: add metrics for functions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/114665.yaml b/docs/changelog/114665.yaml deleted file mode 100644 index b90bb799bd896..0000000000000 --- a/docs/changelog/114665.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114665 -summary: Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` -area: ES|QL -type: bug -issues: - - 105095 diff --git 
a/docs/changelog/114681.yaml b/docs/changelog/114681.yaml deleted file mode 100644 index 2a9901114e56f..0000000000000 --- a/docs/changelog/114681.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114681 -summary: "Support for unsigned 64 bit numbers in Cpu stats" -area: Infra/Core -type: enhancement -issues: - - 112274 diff --git a/docs/changelog/114742.yaml b/docs/changelog/114742.yaml deleted file mode 100644 index 5bd3dad4400b8..0000000000000 --- a/docs/changelog/114742.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114742 -summary: Adding support for additional mapping to simulate ingest API -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/114819.yaml b/docs/changelog/114819.yaml deleted file mode 100644 index f8d03f7024801..0000000000000 --- a/docs/changelog/114819.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114819 -summary: Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` -area: EQL -type: bug -issues: - - 114599 diff --git a/docs/changelog/114855.yaml b/docs/changelog/114855.yaml deleted file mode 100644 index daa6b985a14cf..0000000000000 --- a/docs/changelog/114855.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114855 -summary: Add query rules retriever -area: Relevance -type: enhancement -issues: [ ] diff --git a/docs/changelog/114862.yaml b/docs/changelog/114862.yaml deleted file mode 100644 index fb5f05fb8e2f9..0000000000000 --- a/docs/changelog/114862.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114862 -summary: "[Inference API] Add API to get configuration of inference services" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/114869.yaml b/docs/changelog/114869.yaml deleted file mode 100644 index 755418e7ce4d9..0000000000000 --- a/docs/changelog/114869.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114869 -summary: Standardize error code when bulk body is invalid -area: CRUD -type: bug -issues: [] diff --git a/docs/changelog/114899.yaml b/docs/changelog/114899.yaml deleted file mode 100644 index 
399aa5cf35409..0000000000000 --- a/docs/changelog/114899.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114899 -summary: "ES|QL: Fix stats by constant expression" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114924.yaml b/docs/changelog/114924.yaml deleted file mode 100644 index 536f446ef790d..0000000000000 --- a/docs/changelog/114924.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 114924 -summary: Reducing error-level stack trace logging for normal events in `GeoIpDownloader` -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/114934.yaml b/docs/changelog/114934.yaml deleted file mode 100644 index 68628993b1c80..0000000000000 --- a/docs/changelog/114934.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114934 -summary: "[ES|QL] To_DatePeriod and To_TimeDuration return better error messages on\ - \ `union_type` fields" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/114964.yaml b/docs/changelog/114964.yaml deleted file mode 100644 index 8274aeb76a937..0000000000000 --- a/docs/changelog/114964.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 114964 -summary: Add a `monitor_stats` privilege and allow that privilege for remote cluster - privileges -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/115041.yaml b/docs/changelog/115041.yaml deleted file mode 100644 index f4c047c1569ec..0000000000000 --- a/docs/changelog/115041.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115041 -summary: Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` - to 100_000 -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml deleted file mode 100644 index 762bcca5e8c52..0000000000000 --- a/docs/changelog/115091.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 115091 -summary: Added stricter range type checks and runtime warnings for ENRICH -area: ES|QL -type: bug -issues: - - 107357 - - 116799 diff --git a/docs/changelog/115102.yaml b/docs/changelog/115102.yaml 
deleted file mode 100644 index f679bb6c223a6..0000000000000 --- a/docs/changelog/115102.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115102 -summary: Watch Next Run Interval Resets On Shard Move or Node Restart -area: Watcher -type: bug -issues: - - 111433 diff --git a/docs/changelog/115142.yaml b/docs/changelog/115142.yaml deleted file mode 100644 index 2af968ae156da..0000000000000 --- a/docs/changelog/115142.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115142 -summary: Attempt to clean up index before remote transfer -area: Recovery -type: enhancement -issues: - - 104473 diff --git a/docs/changelog/115266.yaml b/docs/changelog/115266.yaml deleted file mode 100644 index 1d7fb1368c0e8..0000000000000 --- a/docs/changelog/115266.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115266 -summary: ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote - clusters -area: ES|QL -type: enhancement -issues: [ 114531 ] diff --git a/docs/changelog/115359.yaml b/docs/changelog/115359.yaml deleted file mode 100644 index 65b3086dfc8d0..0000000000000 --- a/docs/changelog/115359.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115359 -summary: Adding support for simulate ingest mapping adddition for indices with mappings - that do not come from templates -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/115414.yaml b/docs/changelog/115414.yaml deleted file mode 100644 index 7475b765bb30e..0000000000000 --- a/docs/changelog/115414.yaml +++ /dev/null @@ -1,9 +0,0 @@ -pr: 115414 -summary: Mitigate IOSession timeouts -area: Machine Learning -type: bug -issues: - - 114385 - - 114327 - - 114105 - - 114232 diff --git a/docs/changelog/115585.yaml b/docs/changelog/115585.yaml deleted file mode 100644 index 02eecfc3d7d2b..0000000000000 --- a/docs/changelog/115585.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115459 -summary: Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter -area: Search -type: enhancement -issues: - - 97849 
diff --git a/docs/changelog/115640.yaml b/docs/changelog/115640.yaml deleted file mode 100644 index 5c4a943a9697d..0000000000000 --- a/docs/changelog/115640.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115640 -summary: Fix NPE on plugin sync -area: Infra/CLI -type: bug -issues: - - 114818 diff --git a/docs/changelog/115655.yaml b/docs/changelog/115655.yaml deleted file mode 100644 index 7184405867657..0000000000000 --- a/docs/changelog/115655.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115655 -summary: Better sizing `BytesRef` for Strings in Queries -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/115678.yaml b/docs/changelog/115678.yaml deleted file mode 100644 index 31240eae1ebb4..0000000000000 --- a/docs/changelog/115678.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115678 -summary: "ESQL: extract common filter from aggs" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/115687.yaml b/docs/changelog/115687.yaml deleted file mode 100644 index 1180b4627c635..0000000000000 --- a/docs/changelog/115687.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115687 -summary: Add default ILM policies and switch to ILM for apm-data plugin -area: Data streams -type: feature -issues: [] diff --git a/docs/changelog/115744.yaml b/docs/changelog/115744.yaml deleted file mode 100644 index 9b8c91e59f451..0000000000000 --- a/docs/changelog/115744.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115744 -summary: Use `SearchStats` instead of field.isAggregatable in data node planning -area: ES|QL -type: bug -issues: - - 115737 diff --git a/docs/changelog/115792.yaml b/docs/changelog/115792.yaml deleted file mode 100644 index 2945a64e3043a..0000000000000 --- a/docs/changelog/115792.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115792 -summary: Add ES|QL `bit_length` function -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/115797.yaml b/docs/changelog/115797.yaml deleted file mode 100644 index 8adf51887c28a..0000000000000 --- a/docs/changelog/115797.yaml +++ 
/dev/null @@ -1,6 +0,0 @@ -pr: 115797 -summary: Enable `_tier` based coordinator rewrites for all indices (not just mounted - indices) -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/115807.yaml b/docs/changelog/115807.yaml deleted file mode 100644 index d17cabca4bd03..0000000000000 --- a/docs/changelog/115807.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115807 -summary: "[Inference API] Improve chunked results error message" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/115812.yaml b/docs/changelog/115812.yaml deleted file mode 100644 index c45c97041eb00..0000000000000 --- a/docs/changelog/115812.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115812 -summary: "Prohibit changes to index mode, source, and sort settings during resize" -area: Logs -type: bug -issues: [] diff --git a/docs/changelog/115814.yaml b/docs/changelog/115814.yaml deleted file mode 100644 index 34f1213272d6f..0000000000000 --- a/docs/changelog/115814.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 115814 -summary: "[ES|QL] Implicit casting string literal to intervals" -area: ES|QL -type: enhancement -issues: - - 115352 diff --git a/docs/changelog/115858.yaml b/docs/changelog/115858.yaml deleted file mode 100644 index 0c0408fa656f8..0000000000000 --- a/docs/changelog/115858.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115858 -summary: "ESQL: optimise aggregations filtered by false/null into evals" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/115994.yaml b/docs/changelog/115994.yaml deleted file mode 100644 index ac090018c8a12..0000000000000 --- a/docs/changelog/115994.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 115994 -summary: Add logsdb telemetry -area: Logs -type: enhancement -issues: [] diff --git a/docs/changelog/116021.yaml b/docs/changelog/116021.yaml deleted file mode 100644 index 58c84b26805b2..0000000000000 --- a/docs/changelog/116021.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 116021 -summary: Fields caps does not honour 
ignore_unavailable -area: Search -type: bug -issues: - - 107767 diff --git a/docs/changelog/116082.yaml b/docs/changelog/116082.yaml deleted file mode 100644 index 35ca5fb1ea82e..0000000000000 --- a/docs/changelog/116082.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116082 -summary: Add support for bitwise inner-product in painless -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/116128.yaml b/docs/changelog/116128.yaml deleted file mode 100644 index 7c38c0529c50d..0000000000000 --- a/docs/changelog/116128.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116128 -summary: Add num docs and size to logsdb telemetry -area: Logs -type: enhancement -issues: [] diff --git a/docs/changelog/116211.yaml b/docs/changelog/116211.yaml deleted file mode 100644 index 6f55b1b2fef34..0000000000000 --- a/docs/changelog/116211.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116211 -summary: Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` -area: Network -type: bug -issues: [] diff --git a/docs/changelog/116325.yaml b/docs/changelog/116325.yaml deleted file mode 100644 index b8cd16dc85773..0000000000000 --- a/docs/changelog/116325.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116325 -summary: Adjust analyze limit exception to be a `bad_request` -area: Analysis -type: bug -issues: [] diff --git a/docs/changelog/116346.yaml b/docs/changelog/116346.yaml deleted file mode 100644 index 1dcace88a98c0..0000000000000 --- a/docs/changelog/116346.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116346 -summary: "[ESQL] Fix Binary Comparisons on Date Nanos" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/116348.yaml b/docs/changelog/116348.yaml deleted file mode 100644 index 927ffc5a6121d..0000000000000 --- a/docs/changelog/116348.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116348 -summary: "ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time" -area: ES|QL -type: enhancement -issues: [ 114531 ] diff --git a/docs/changelog/116431.yaml 
b/docs/changelog/116431.yaml deleted file mode 100644 index 50c6baf1d01c7..0000000000000 --- a/docs/changelog/116431.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116431 -summary: Adds support for `input_type` field to Vertex inference service -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/116437.yaml b/docs/changelog/116437.yaml deleted file mode 100644 index 94c2464db9980..0000000000000 --- a/docs/changelog/116437.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116437 -summary: Ensure class resource stream is closed in `ResourceUtils` -area: Indices APIs -type: enhancement -issues: [] diff --git a/docs/changelog/116447.yaml b/docs/changelog/116447.yaml deleted file mode 100644 index 8c0cea4b54578..0000000000000 --- a/docs/changelog/116447.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116447 -summary: Adding a deprecation info API warning for data streams with old indices -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/116515.yaml b/docs/changelog/116515.yaml deleted file mode 100644 index 6c0d473361e52..0000000000000 --- a/docs/changelog/116515.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116515 -summary: Esql/lookup join grammar -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/116583.yaml b/docs/changelog/116583.yaml deleted file mode 100644 index 3dc8337fe5b86..0000000000000 --- a/docs/changelog/116583.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 116583 -summary: Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions -area: ES|QL -type: bug -issues: - - 116529 - - 116544 diff --git a/docs/changelog/116591.yaml b/docs/changelog/116591.yaml deleted file mode 100644 index 60ef241e197b3..0000000000000 --- a/docs/changelog/116591.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116591 -summary: "Add support for `BYTE_LENGTH` scalar function" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/116656.yaml b/docs/changelog/116656.yaml deleted file mode 100644 index 
eb5d5a1cfc201..0000000000000 --- a/docs/changelog/116656.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 116656 -summary: _validate does not honour ignore_unavailable -area: Search -type: bug -issues: - - 116594 diff --git a/docs/changelog/116664.yaml b/docs/changelog/116664.yaml deleted file mode 100644 index 36915fca39731..0000000000000 --- a/docs/changelog/116664.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 116664 -summary: Hides `hugging_face_elser` service from the `GET _inference/_services API` -area: Machine Learning -type: bug -issues: - - 116644 diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml deleted file mode 100644 index 0b1d1646868aa..0000000000000 --- a/docs/changelog/116689.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 116689 -summary: Deprecate `_source.mode` in mappings -area: Mapping -type: deprecation -issues: [] -deprecation: - title: Deprecate `_source.mode` in mappings - area: Mapping - details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. 
- impact: Use `index.mapping.source.mode` index setting instead diff --git a/docs/changelog/116809.yaml b/docs/changelog/116809.yaml deleted file mode 100644 index 61dbeb233d576..0000000000000 --- a/docs/changelog/116809.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116809 -summary: "Distinguish `LicensedFeature` by family field" -area: License -type: bug -issues: [] diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml deleted file mode 100644 index afe06c583fe55..0000000000000 --- a/docs/changelog/116819.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116819 -summary: ESQL - Add match operator (:) -area: Search -type: feature -issues: [] diff --git a/docs/changelog/116931.yaml b/docs/changelog/116931.yaml deleted file mode 100644 index 8b31d236ff137..0000000000000 --- a/docs/changelog/116931.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116931 -summary: Enable built-in Inference Endpoints and default for Semantic Text -area: "Machine Learning" -type: enhancement -issues: [] diff --git a/docs/changelog/116953.yaml b/docs/changelog/116953.yaml deleted file mode 100644 index 33616510d8fd0..0000000000000 --- a/docs/changelog/116953.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 116953 -summary: Fix false positive date detection with trailing dot -area: Mapping -type: bug -issues: - - 116946 diff --git a/docs/changelog/116957.yaml b/docs/changelog/116957.yaml deleted file mode 100644 index 1020190de180d..0000000000000 --- a/docs/changelog/116957.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116957 -summary: Propagate scoring function through random sampler -area: Machine Learning -type: bug -issues: [ 110134 ] diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml deleted file mode 100644 index 8f16b00e3f9fc..0000000000000 --- a/docs/changelog/116962.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116962 -summary: "Add special case for elastic reranker in inference API" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/116980.yaml 
b/docs/changelog/116980.yaml deleted file mode 100644 index 140324fd40b92..0000000000000 --- a/docs/changelog/116980.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 116980 -summary: "ESQL: Fix sorts containing `_source`" -area: ES|QL -type: bug -issues: - - 116659 diff --git a/docs/changelog/117080.yaml b/docs/changelog/117080.yaml deleted file mode 100644 index 5909f966e0fa2..0000000000000 --- a/docs/changelog/117080.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117080 -summary: Esql Enable Date Nanos (tech preview) -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/117105.yaml b/docs/changelog/117105.yaml deleted file mode 100644 index de56c4d521a62..0000000000000 --- a/docs/changelog/117105.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117105 -summary: Fix long metric deserialize & add - auto-resize needs to be set manually -area: CCS -type: bug -issues: - - 116914 diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml deleted file mode 100644 index e89c2d81506d9..0000000000000 --- a/docs/changelog/117189.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117189 -summary: Fix deberta tokenizer bug caused by bug in normalizer -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/117213.yaml b/docs/changelog/117213.yaml deleted file mode 100644 index 3b4cd0cee966c..0000000000000 --- a/docs/changelog/117213.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117213 -summary: Fix reconstituting version string from components -area: Ingest Node -type: bug -issues: - - 116950 diff --git a/docs/changelog/117271.yaml b/docs/changelog/117271.yaml deleted file mode 100644 index 1a328279b9635..0000000000000 --- a/docs/changelog/117271.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117271 -summary: Don't skip shards in coord rewrite if timestamp is an alias -area: Search -type: bug -issues: [] diff --git a/docs/changelog/117294.yaml b/docs/changelog/117294.yaml deleted file mode 100644 index f6e80690de7ff..0000000000000 --- a/docs/changelog/117294.yaml +++ /dev/null 
@@ -1,5 +0,0 @@ -pr: 117294 -summary: Always Emit Inference ID in Semantic Text Mapping -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/117297.yaml b/docs/changelog/117297.yaml deleted file mode 100644 index 4a0051bbae644..0000000000000 --- a/docs/changelog/117297.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117297 -summary: Fix CCS exchange when multi cluster aliases point to same cluster -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117312.yaml b/docs/changelog/117312.yaml deleted file mode 100644 index 302b91388ef2b..0000000000000 --- a/docs/changelog/117312.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117312 -summary: Add missing `async_search` query parameters to rest-api-spec -area: Search -type: bug -issues: [] diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml deleted file mode 100644 index 69474d68a8190..0000000000000 --- a/docs/changelog/117316.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117316 -summary: Fix validation of SORT by aggregate functions -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117350.yaml b/docs/changelog/117350.yaml deleted file mode 100644 index dca54f2037a87..0000000000000 --- a/docs/changelog/117350.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117350 -summary: "Improve halfbyte transposition performance, marginally improving bbq performance" -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/117404.yaml b/docs/changelog/117404.yaml deleted file mode 100644 index 0bab171956ca9..0000000000000 --- a/docs/changelog/117404.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117404 -summary: Correct bit * byte and bit * float script comparisons -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/117503.yaml b/docs/changelog/117503.yaml deleted file mode 100644 index d48741262b581..0000000000000 --- a/docs/changelog/117503.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117503 -summary: Fix COUNT filter pushdown -area: ES|QL -type: bug -issues: - - 115522 diff 
--git a/docs/changelog/117551.yaml b/docs/changelog/117551.yaml deleted file mode 100644 index 081dd9203d82a..0000000000000 --- a/docs/changelog/117551.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117551 -summary: Fix stats by constant expresson with alias -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117575.yaml b/docs/changelog/117575.yaml deleted file mode 100644 index 781444ae97be5..0000000000000 --- a/docs/changelog/117575.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117575 -summary: Fix enrich cache size setting name -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/117595.yaml b/docs/changelog/117595.yaml deleted file mode 100644 index 9360c372ac97e..0000000000000 --- a/docs/changelog/117595.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117595 -summary: Fix for Deberta tokenizer when input sequence exceeds 512 tokens -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/117657.yaml b/docs/changelog/117657.yaml deleted file mode 100644 index 0a72e9dabe9e8..0000000000000 --- a/docs/changelog/117657.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117657 -summary: Ignore cancellation exceptions -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117762.yaml b/docs/changelog/117762.yaml deleted file mode 100644 index 123432e0f0507..0000000000000 --- a/docs/changelog/117762.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117762 -summary: "Parse the contents of dynamic objects for [subobjects:false]" -area: Mapping -type: bug -issues: - - 117544 diff --git a/docs/changelog/117792.yaml b/docs/changelog/117792.yaml deleted file mode 100644 index 2d7ddda1ace40..0000000000000 --- a/docs/changelog/117792.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117792 -summary: Address mapping and compute engine runtime field issues -area: Mapping -type: bug -issues: - - 117644 diff --git a/docs/changelog/117842.yaml b/docs/changelog/117842.yaml deleted file mode 100644 index 9b528a158288c..0000000000000 --- a/docs/changelog/117842.yaml +++ 
/dev/null @@ -1,5 +0,0 @@ -pr: 117842 -summary: Limit size of `Literal#toString` -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117865.yaml b/docs/changelog/117865.yaml deleted file mode 100644 index 33dc497725f92..0000000000000 --- a/docs/changelog/117865.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117865 -summary: Fix BWC for ES|QL cluster request -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/117914.yaml b/docs/changelog/117914.yaml deleted file mode 100644 index da58ed7bb04b7..0000000000000 --- a/docs/changelog/117914.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117914 -summary: Fix for propagating filters from compound to inner retrievers -area: Ranking -type: bug -issues: [] diff --git a/docs/changelog/117920.yaml b/docs/changelog/117920.yaml deleted file mode 100644 index 1bfddabd4462d..0000000000000 --- a/docs/changelog/117920.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117920 -summary: Wait for the worker service to shutdown before closing task processor -area: Machine Learning -type: bug -issues: - - 117563 diff --git a/docs/changelog/117953.yaml b/docs/changelog/117953.yaml deleted file mode 100644 index 62f0218b1cdc7..0000000000000 --- a/docs/changelog/117953.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117953 -summary: Acquire stats searcher for data stream stats -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/118354.yaml b/docs/changelog/118354.yaml deleted file mode 100644 index e2d72db121276..0000000000000 --- a/docs/changelog/118354.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118354 -summary: Fix log message format bugs -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/118370.yaml b/docs/changelog/118370.yaml deleted file mode 100644 index e6a429448e493..0000000000000 --- a/docs/changelog/118370.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118370 -summary: Fix concurrency issue with `ReinitializingSourceProvider` -area: Mapping -type: bug -issues: - - 118238 diff --git a/docs/changelog/118378.yaml 
b/docs/changelog/118378.yaml deleted file mode 100644 index d6c388b671968..0000000000000 --- a/docs/changelog/118378.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118378 -summary: Opt into extra data stream resolution -area: ES|QL -type: bug -issues: [] From 950db572219e326d2986c744b20430dbfbd01a43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Fri, 13 Dec 2024 17:27:24 +0100 Subject: [PATCH 26/32] [test] Avoid running the NoImds test on AWS (#118675) Disabled the NoImds test on AWS EC2 instance where it fails because the AWS metadata are available, which is not expected by this test. --- .../ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java index 602a98e17970d..73213090b6f93 100644 --- a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java @@ -9,6 +9,8 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.util.EC2MetadataUtils; + import org.elasticsearch.client.Request; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; @@ -29,6 +31,8 @@ protected String getTestRestCluster() { } public void testAvailabilityZoneAttribute() throws IOException { + assumeTrue("test only in non-AWS environment", EC2MetadataUtils.getInstanceId() == null); + final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none"))); for (final var nodeId : 
nodesInfoResponse.evaluateMapKeys("nodes")) { assertNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone")); From 54e839b11068b9ea455befe1ce88d569b0f2c937 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 13 Dec 2024 17:42:08 +0100 Subject: [PATCH 27/32] ESQL: Fix LogicalPlanOptimizerTests testPlanSanityCheckWithBinaryPlans (#118672) --- muted-tests.yml | 3 --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 3 +-- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 36dfc306b0147..e0d011c1bf239 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -317,9 +317,6 @@ tests: - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testInvalidJSON issue: https://github.com/elastic/elasticsearch/issues/116521 -- class: org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests - method: testPlanSanityCheckWithBinaryPlans - issue: https://github.com/elastic/elasticsearch/issues/118656 # Examples: # diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 7e498eb6654b9..d97a8bb2bc27f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -4911,8 +4911,7 @@ public void testPlanSanityCheckWithBinaryPlans() throws Exception { """); var project = as(plan, Project.class); - var limit = as(project.child(), Limit.class); - var join = as(limit.child(), Join.class); + var join = as(project.child(), Join.class); var joinWithInvalidLeftPlan = join.replaceChildren(join.right(), join.right()); IllegalStateException e = expectThrows(IllegalStateException.class, () -> 
logicalOptimizer.optimize(joinWithInvalidLeftPlan)); From 6c56c32f7a872acda86232c542315f28215d24b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Fri, 13 Dec 2024 18:40:23 +0100 Subject: [PATCH 28/32] Grant necessary Kibana application privileges to `reporting_user` role (#118058) Previously, Kibana was authorizing (and granting application privileges) to create reports, simply based on the `reporting_user` role name. This PR makes these application privileges explicitly granted to the `reporting_user` role. --- docs/changelog/118058.yaml | 5 ++ .../authorization/built-in-roles.asciidoc | 11 ++- .../authz/store/ReservedRolesStore.java | 29 +++++-- .../authz/store/ReservedRolesStoreTests.java | 76 ++++++++++++++++++- 4 files changed, 106 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/118058.yaml diff --git a/docs/changelog/118058.yaml b/docs/changelog/118058.yaml new file mode 100644 index 0000000000000..d5fad346d4d85 --- /dev/null +++ b/docs/changelog/118058.yaml @@ -0,0 +1,5 @@ +pr: 118058 +summary: Grant necessary Kibana application privileges to `reporting_user` role +area: Authorization +type: enhancement +issues: [] diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index d730587e7db17..13812b915dc5e 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -161,12 +161,11 @@ Grants the minimum privileges required to write data into the monitoring indices Grants the minimum privileges required to collect monitoring data for the {stack}. [[built-in-roles-reporting-user]] `reporting_user`:: -Grants the specific privileges required for users of {reporting} other than those -required to use {kib}. This role grants access to the reporting indices; each -user has access to only their own reports. 
-Reporting users should also be assigned additional roles that grant -{kibana-ref}/xpack-security-authorization.html[access to {kib}] as well as read -access to the <> that will be used to generate reports. +Grants the necessary privileges required to use {reporting} features in {kib}, +including generating and downloading reports. This role implicitly grants access +to all Kibana reporting features, with each user having access only to their own reports. +Note that reporting users should also be assigned additional roles that grant read access +to the <> that will be used to generate reports. [[built-in-roles-rollup-admin]] `rollup_admin`:: Grants `manage_rollup` cluster privileges, which enable you to manage and execute all rollup actions. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index fc14ec6811014..bdaf75203ee5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -301,25 +301,40 @@ private static Map initializeReservedRoles() { "Grants access to manage all index templates and all ingest pipeline configurations." 
) ), - // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role entry( "reporting_user", new RoleDescriptor( "reporting_user", null, null, + new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana-.kibana") + .resources("*") + .privileges( + "feature_discover.minimal_read", + "feature_discover.generate_report", + "feature_dashboard.minimal_read", + "feature_dashboard.generate_report", + "feature_dashboard.download_csv_report", + "feature_canvas.minimal_read", + "feature_canvas.generate_report", + "feature_visualize.minimal_read", + "feature_visualize.generate_report" + ) + .build() }, null, null, - null, - MetadataUtils.getDeprecatedReservedMetadata("Please use Kibana feature privileges instead"), + MetadataUtils.DEFAULT_RESERVED_METADATA, null, null, null, null, - "Grants the specific privileges required for users of X-Pack reporting other than those required to use Kibana. " - + "This role grants access to the reporting indices; each user has access to only their own reports. " - + "Reporting users should also be assigned additional roles that grant access to Kibana as well as read access " - + "to the indices that will be used to generate reports." + "Grants the necessary privileges required to use reporting features in Kibana, " + + "including generating and downloading reports. " + + "This role implicitly grants access to all Kibana reporting features, " + + "with each user having access only to their own reports. Note that reporting users should also be assigned " + + "additional roles that grant read access to the indices that will be used to generate reports." 
) ), entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index b69b0ece89960..1b9a65d12d8d9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -2646,12 +2646,57 @@ public void testReportingUserRole() { RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("reporting_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - assertThat(roleDescriptor.getMetadata(), hasEntry("_deprecated", true)); + + final String applicationName = "kibana-.kibana"; + + final Set applicationPrivilegeNames = Set.of( + "feature_discover.minimal_read", + "feature_discover.generate_report", + "feature_dashboard.minimal_read", + "feature_dashboard.generate_report", + "feature_dashboard.download_csv_report", + "feature_canvas.minimal_read", + "feature_canvas.generate_report", + "feature_visualize.minimal_read", + "feature_visualize.generate_report" + ); + + final Set allowedApplicationActionPatterns = Set.of( + "login:", + "app:discover", + "app:canvas", + "app:kibana", + "ui:catalogue/canvas", + "ui:navLinks/canvas", + "ui:catalogue/discover", + "ui:navLinks/discover", + "ui:navLinks/kibana", + "saved_object:index-pattern/*", + "saved_object:search/*", + "saved_object:query/*", + "saved_object:config/*", + "saved_object:config/get", + "saved_object:config/find", + "saved_object:config-global/*", + "saved_object:telemetry/*", + "saved_object:canvas-workpad/*", + "saved_object:canvas-element/*", + "saved_object:url/*", + "ui:discover/show" + ); + + final List 
applicationPrivilegeDescriptors = new ArrayList<>(); + for (String appPrivilegeName : applicationPrivilegeNames) { + applicationPrivilegeDescriptors.add( + new ApplicationPrivilegeDescriptor(applicationName, appPrivilegeName, allowedApplicationActionPatterns, Map.of()) + ); + } Role reportingUserRole = Role.buildFromRoleDescriptor( roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), - RESTRICTED_INDICES + RESTRICTED_INDICES, + applicationPrivilegeDescriptors ); assertThat(reportingUserRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); @@ -2723,6 +2768,33 @@ public void testReportingUserRole() { assertNoAccessAllowed(reportingUserRole, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES); assertNoAccessAllowed(reportingUserRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); + + applicationPrivilegeNames.forEach(appPrivilege -> { + assertThat( + reportingUserRole.application() + .grants( + ApplicationPrivilegeTests.createPrivilege( + applicationName, + appPrivilege, + allowedApplicationActionPatterns.toArray(new String[0]) + ), + "*" + ), + is(true) + ); + }); + assertThat( + reportingUserRole.application() + .grants( + ApplicationPrivilegeTests.createPrivilege( + "kibana-.*", + "feature_random.minimal_read", + allowedApplicationActionPatterns.toArray(new String[0]) + ), + "*" + ), + is(false) + ); } public void testSuperuserRole() { From c9d8a3a2b6bff2130634ec1e572cdf2ccea203e4 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 13 Dec 2024 13:47:51 -0500 Subject: [PATCH 29/32] Add replica handling to the ILM MountSnapshotStep (#118687) --- .../xpack/core/ilm/AsyncWaitStep.java | 3 + .../xpack/core/ilm/DeleteAction.java | 6 +- .../xpack/core/ilm/DownsampleAction.java | 3 +- .../xpack/core/ilm/ForceMergeAction.java | 3 +- .../xpack/core/ilm/MountSnapshotStep.java | 29 ++-- 
.../xpack/core/ilm/ReadOnlyAction.java | 3 +- .../core/ilm/SearchableSnapshotAction.java | 7 +- .../xpack/core/ilm/ShrinkAction.java | 3 +- .../core/ilm/TimeseriesLifecycleType.java | 13 +- .../WaitUntilTimeSeriesEndTimePassesStep.java | 5 +- .../core/ilm/MountSnapshotStepTests.java | 161 ++++++++++++------ .../ilm/SearchableSnapshotActionTests.java | 95 ++++------- ...UntilTimeSeriesEndTimePassesStepTests.java | 9 +- 13 files changed, 182 insertions(+), 158 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AsyncWaitStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AsyncWaitStep.java index 6a72af5bce5e9..fc5e8d473b763 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AsyncWaitStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AsyncWaitStep.java @@ -8,6 +8,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.xcontent.ToXContentObject; @@ -20,6 +21,7 @@ */ public abstract class AsyncWaitStep extends Step { + @Nullable private final Client client; public AsyncWaitStep(StepKey key, StepKey nextStepKey, Client client) { @@ -27,6 +29,7 @@ public AsyncWaitStep(StepKey key, StepKey nextStepKey, Client client) { this.client = client; } + @Nullable protected Client getClient() { return client; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java index 8712cefac5d31..6c2ab86995a6d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java @@ -93,8 +93,7 @@ public List toSteps(Client client, String 
phase, Step.StepKey nextStepKey) WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, cleanSnapshotKey, - Instant::now, - client + Instant::now ); CleanupSnapshotStep cleanupSnapshotStep = new CleanupSnapshotStep(cleanSnapshotKey, deleteStepKey, client); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); @@ -108,8 +107,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, deleteStepKey, - Instant::now, - client + Instant::now ); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); return List.of(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, deleteStep); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java index 697f948e47832..6ce9e05e4a464 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java @@ -200,8 +200,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, readOnlyKey, - Instant::now, - client + Instant::now ); // Mark source index as read-only ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, generateDownsampleIndexNameKey, client); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeAction.java index f8f4ce2bb0354..ac398bccb64e4 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeAction.java @@ -162,8 +162,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, codecChange ? closeKey : forceMergeKey, - Instant::now, - client + Instant::now ); // Indices already in this step key when upgrading need to know how to move forward but stop making the index diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java index 7d045f2950e1b..82d41b91fea4f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java @@ -41,6 +41,7 @@ public class MountSnapshotStep extends AsyncRetryDuringSnapshotActionStep { private final MountSearchableSnapshotRequest.Storage storageType; @Nullable private final Integer totalShardsPerNode; + private final int replicas; public MountSnapshotStep( StepKey key, @@ -48,7 +49,8 @@ public MountSnapshotStep( Client client, String restoredIndexPrefix, MountSearchableSnapshotRequest.Storage storageType, - @Nullable Integer totalShardsPerNode + @Nullable Integer totalShardsPerNode, + int replicas ) { super(key, nextStepKey, client); this.restoredIndexPrefix = restoredIndexPrefix; @@ -57,16 +59,10 @@ public MountSnapshotStep( throw new IllegalArgumentException("[" + SearchableSnapshotAction.TOTAL_SHARDS_PER_NODE.getPreferredName() + "] must be >= 1"); } this.totalShardsPerNode = totalShardsPerNode; - } - public MountSnapshotStep( - StepKey key, - StepKey nextStepKey, - Client client, - String restoredIndexPrefix, - 
MountSearchableSnapshotRequest.Storage storageType - ) { - this(key, nextStepKey, client, restoredIndexPrefix, storageType, null); + // this isn't directly settable by the user, so validation by assertion is sufficient + assert replicas >= 0 : "number of replicas must be gte zero, but was [" + replicas + "]"; + this.replicas = replicas; } @Override @@ -87,6 +83,10 @@ public Integer getTotalShardsPerNode() { return totalShardsPerNode; } + public int getReplicas() { + return replicas; + } + @Override void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState currentClusterState, ActionListener listener) { String indexName = indexMetadata.getIndex().getName(); @@ -162,11 +162,13 @@ void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState currentCl } final Settings.Builder settingsBuilder = Settings.builder(); - overrideTierPreference(this.getKey().phase()).ifPresent(override -> settingsBuilder.put(DataTier.TIER_PREFERENCE, override)); if (totalShardsPerNode != null) { settingsBuilder.put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), totalShardsPerNode); } + if (replicas > 0) { + settingsBuilder.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, replicas); + } final MountSearchableSnapshotRequest mountSearchableSnapshotRequest = new MountSearchableSnapshotRequest( TimeValue.MAX_VALUE, @@ -245,7 +247,7 @@ String[] ignoredIndexSettings() { @Override public int hashCode() { - return Objects.hash(super.hashCode(), restoredIndexPrefix, storageType, totalShardsPerNode); + return Objects.hash(super.hashCode(), restoredIndexPrefix, storageType, totalShardsPerNode, replicas); } @Override @@ -260,6 +262,7 @@ public boolean equals(Object obj) { return super.equals(obj) && Objects.equals(restoredIndexPrefix, other.restoredIndexPrefix) && Objects.equals(storageType, other.storageType) - && Objects.equals(totalShardsPerNode, other.totalShardsPerNode); + && Objects.equals(totalShardsPerNode, other.totalShardsPerNode) + && 
Objects.equals(replicas, other.replicas); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java index 2b03dc77eb5b6..b36156842acf5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java @@ -67,8 +67,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, readOnlyKey, - Instant::now, - client + Instant::now ); ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, nextStepKey, client); return List.of(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java index f585575534b76..b746ee8ea7c07 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java @@ -113,6 +113,7 @@ public String getSnapshotRepository() { return snapshotRepository; } + @Nullable public Integer getTotalShardsPerNode() { return totalShardsPerNode; } @@ -230,8 +231,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, skipGeneratingSnapshotKey, - Instant::now, - client + Instant::now ); // When generating a snapshot, we either jump to the force merge step, or we skip the @@ -318,7 +318,8 @@ public List toSteps(Client client, String 
phase, StepKey nextStepKey, XPac client, getRestoredIndexPrefix(mountSnapshotKey), storageType, - totalShardsPerNode + totalShardsPerNode, + 0 ); WaitForIndexColorStep waitForGreenIndexHealthStep = new WaitForIndexColorStep( waitForGreenRestoredIndexKey, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java index 70ec5da1d8a2a..f7478518613e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java @@ -231,8 +231,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep( waitTimeSeriesEndTimePassesKey, readOnlyKey, - Instant::now, - client + Instant::now ); ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, checkTargetShardsCountKey, client); CheckTargetShardsCountStep checkTargetShardsCountStep = new CheckTargetShardsCountStep( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java index 0fd280f440f39..10a4c7086a0cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java @@ -27,7 +27,6 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * Represents the lifecycle of an index from creation to deletion. 
A @@ -49,7 +48,7 @@ public class TimeseriesLifecycleType implements LifecycleType { static final String DELETE_PHASE = "delete"; public static final List ORDERED_VALID_PHASES = List.of(HOT_PHASE, WARM_PHASE, COLD_PHASE, FROZEN_PHASE, DELETE_PHASE); - public static final List ORDERED_VALID_HOT_ACTIONS = Stream.of( + public static final List ORDERED_VALID_HOT_ACTIONS = List.of( SetPriorityAction.NAME, UnfollowAction.NAME, RolloverAction.NAME, @@ -58,8 +57,8 @@ public class TimeseriesLifecycleType implements LifecycleType { ShrinkAction.NAME, ForceMergeAction.NAME, SearchableSnapshotAction.NAME - ).filter(Objects::nonNull).toList(); - public static final List ORDERED_VALID_WARM_ACTIONS = Stream.of( + ); + public static final List ORDERED_VALID_WARM_ACTIONS = List.of( SetPriorityAction.NAME, UnfollowAction.NAME, ReadOnlyAction.NAME, @@ -68,8 +67,8 @@ public class TimeseriesLifecycleType implements LifecycleType { MigrateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME - ).filter(Objects::nonNull).toList(); - public static final List ORDERED_VALID_COLD_ACTIONS = Stream.of( + ); + public static final List ORDERED_VALID_COLD_ACTIONS = List.of( SetPriorityAction.NAME, UnfollowAction.NAME, ReadOnlyAction.NAME, @@ -78,7 +77,7 @@ public class TimeseriesLifecycleType implements LifecycleType { AllocateAction.NAME, MigrateAction.NAME, FreezeAction.NAME - ).filter(Objects::nonNull).toList(); + ); public static final List ORDERED_VALID_FROZEN_ACTIONS = List.of(UnfollowAction.NAME, SearchableSnapshotAction.NAME); public static final List ORDERED_VALID_DELETE_ACTIONS = List.of(WaitForSnapshotAction.NAME, DeleteAction.NAME); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStep.java index 50a7d48672c8e..3e190a26dd961 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStep.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; @@ -33,8 +32,8 @@ public class WaitUntilTimeSeriesEndTimePassesStep extends AsyncWaitStep { public static final String NAME = "check-ts-end-time-passed"; private final Supplier nowSupplier; - public WaitUntilTimeSeriesEndTimePassesStep(StepKey key, StepKey nextStepKey, Supplier nowSupplier, Client client) { - super(key, nextStepKey, client); + public WaitUntilTimeSeriesEndTimePassesStep(StepKey key, StepKey nextStepKey, Supplier nowSupplier) { + super(key, nextStepKey, null); this.nowSupplier = nowSupplier; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStepTests.java index 8ca7a00ab0948..7ccdb1a27326a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStepTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.test.client.NoOpClient; @@ -42,7 +43,7 @@ public MountSnapshotStep createRandomInstance() { String restoredIndexPrefix = randomAlphaOfLength(10); 
MountSearchableSnapshotRequest.Storage storage = randomStorageType(); Integer totalShardsPerNode = randomTotalShardsPerNode(true); - return new MountSnapshotStep(stepKey, nextStepKey, client, restoredIndexPrefix, storage, totalShardsPerNode); + return new MountSnapshotStep(stepKey, nextStepKey, client, restoredIndexPrefix, storage, totalShardsPerNode, 0); } public static MountSearchableSnapshotRequest.Storage randomStorageType() { @@ -61,7 +62,8 @@ protected MountSnapshotStep copyInstance(MountSnapshotStep instance) { instance.getClient(), instance.getRestoredIndexPrefix(), instance.getStorage(), - instance.getTotalShardsPerNode() + instance.getTotalShardsPerNode(), + instance.getReplicas() ); } @@ -72,7 +74,8 @@ public MountSnapshotStep mutateInstance(MountSnapshotStep instance) { String restoredIndexPrefix = instance.getRestoredIndexPrefix(); MountSearchableSnapshotRequest.Storage storage = instance.getStorage(); Integer totalShardsPerNode = instance.getTotalShardsPerNode(); - switch (between(0, 4)) { + int replicas = instance.getReplicas(); + switch (between(0, 5)) { case 0: key = new StepKey(key.phase(), key.action(), key.name() + randomAlphaOfLength(5)); break; @@ -94,10 +97,13 @@ public MountSnapshotStep mutateInstance(MountSnapshotStep instance) { case 4: totalShardsPerNode = totalShardsPerNode == null ? 1 : totalShardsPerNode + randomIntBetween(1, 100); break; + case 5: + replicas = replicas == 0 ? 
1 : 0; // swap between 0 and 1 + break; default: throw new AssertionError("Illegal randomisation branch"); } - return new MountSnapshotStep(key, nextKey, instance.getClient(), restoredIndexPrefix, storage, totalShardsPerNode); + return new MountSnapshotStep(key, nextKey, instance.getClient(), restoredIndexPrefix, storage, totalShardsPerNode, replicas); } public void testCreateWithInvalidTotalShardsPerNode() throws Exception { @@ -111,7 +117,8 @@ public void testCreateWithInvalidTotalShardsPerNode() throws Exception { client, RESTORED_INDEX_PREFIX, randomStorageType(), - invalidTotalShardsPerNode + invalidTotalShardsPerNode, + 0 ) ); assertEquals("[total_shards_per_node] must be >= 1", exception.getMessage()); @@ -195,14 +202,18 @@ public void testPerformAction() throws Exception { indexName, RESTORED_INDEX_PREFIX, indexName, - new String[] { LifecycleSettings.LIFECYCLE_NAME } + new String[] { LifecycleSettings.LIFECYCLE_NAME }, + null, + 0 ); MountSnapshotStep step = new MountSnapshotStep( randomStepKey(), randomStepKey(), client, RESTORED_INDEX_PREFIX, - randomStorageType() + randomStorageType(), + null, + 0 ); performActionAndWait(step, indexMetadata, clusterState, null); } @@ -237,7 +248,9 @@ public void testResponseStatusHandling() throws Exception { randomStepKey(), clientPropagatingOKResponse, RESTORED_INDEX_PREFIX, - randomStorageType() + randomStorageType(), + null, + 0 ); performActionAndWait(step, indexMetadata, clusterState, null); } @@ -252,7 +265,9 @@ public void testResponseStatusHandling() throws Exception { randomStepKey(), clientPropagatingACCEPTEDResponse, RESTORED_INDEX_PREFIX, - randomStorageType() + randomStorageType(), + null, + 0 ); performActionAndWait(step, indexMetadata, clusterState, null); } @@ -289,47 +304,49 @@ public void testMountWithPartialAndRestoredPrefix() throws Exception { ); } - public void doTestMountWithoutSnapshotIndexNameInState(String prefix) throws Exception { - { - String indexNameSnippet = randomAlphaOfLength(10); - 
String indexName = prefix + indexNameSnippet; - String policyName = "test-ilm-policy"; - Map ilmCustom = new HashMap<>(); - String snapshotName = indexName + "-" + policyName; - ilmCustom.put("snapshot_name", snapshotName); - String repository = "repository"; - ilmCustom.put("snapshot_repository", repository); + private void doTestMountWithoutSnapshotIndexNameInState(String prefix) throws Exception { + String indexNameSnippet = randomAlphaOfLength(10); + String indexName = prefix + indexNameSnippet; + String policyName = "test-ilm-policy"; + Map ilmCustom = new HashMap<>(); + String snapshotName = indexName + "-" + policyName; + ilmCustom.put("snapshot_name", snapshotName); + String repository = "repository"; + ilmCustom.put("snapshot_repository", repository); - IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexName) - .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) - .putCustom(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY, ilmCustom) - .numberOfShards(randomIntBetween(1, 5)) - .numberOfReplicas(randomIntBetween(0, 5)); - IndexMetadata indexMetadata = indexMetadataBuilder.build(); + IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexName) + .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) + .putCustom(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY, ilmCustom) + .numberOfShards(randomIntBetween(1, 5)) + .numberOfReplicas(randomIntBetween(0, 5)); + IndexMetadata indexMetadata = indexMetadataBuilder.build(); - ClusterState clusterState = ClusterState.builder(emptyClusterState()) - .metadata(Metadata.builder().put(indexMetadata, true).build()) - .build(); + ClusterState clusterState = ClusterState.builder(emptyClusterState()) + .metadata(Metadata.builder().put(indexMetadata, true).build()) + .build(); - try (var threadPool = createThreadPool()) { - final var client = getRestoreSnapshotRequestAssertingClient( - threadPool, - 
repository, - snapshotName, - indexName, - RESTORED_INDEX_PREFIX, - indexNameSnippet, - new String[] { LifecycleSettings.LIFECYCLE_NAME } - ); - MountSnapshotStep step = new MountSnapshotStep( - randomStepKey(), - randomStepKey(), - client, - RESTORED_INDEX_PREFIX, - randomStorageType() - ); - performActionAndWait(step, indexMetadata, clusterState, null); - } + try (var threadPool = createThreadPool()) { + final var client = getRestoreSnapshotRequestAssertingClient( + threadPool, + repository, + snapshotName, + indexName, + RESTORED_INDEX_PREFIX, + indexNameSnippet, + new String[] { LifecycleSettings.LIFECYCLE_NAME }, + null, + 0 + ); + MountSnapshotStep step = new MountSnapshotStep( + randomStepKey(), + randomStepKey(), + client, + RESTORED_INDEX_PREFIX, + randomStorageType(), + null, + 0 + ); + performActionAndWait(step, indexMetadata, clusterState, null); } } @@ -361,7 +378,11 @@ public void testIgnoreTotalShardsPerNodeInFrozenPhase() throws Exception { indexName, RESTORED_INDEX_PREFIX, indexName, - new String[] { LifecycleSettings.LIFECYCLE_NAME, ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() } + new String[] { + LifecycleSettings.LIFECYCLE_NAME, + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() }, + null, + 0 ); MountSnapshotStep step = new MountSnapshotStep( new StepKey(TimeseriesLifecycleType.FROZEN_PHASE, randomAlphaOfLength(10), randomAlphaOfLength(10)), @@ -369,13 +390,14 @@ public void testIgnoreTotalShardsPerNodeInFrozenPhase() throws Exception { client, RESTORED_INDEX_PREFIX, randomStorageType(), - null + null, + 0 ); performActionAndWait(step, indexMetadata, clusterState, null); } } - public void testDoNotIgnoreTotalShardsPerNodeIfSet() throws Exception { + public void testDoNotIgnoreTotalShardsPerNodeAndReplicasIfSet() throws Exception { String indexName = randomAlphaOfLength(10); String policyName = "test-ilm-policy"; Map ilmCustom = new HashMap<>(); @@ -395,6 +417,9 @@ public void 
testDoNotIgnoreTotalShardsPerNodeIfSet() throws Exception { .metadata(Metadata.builder().put(indexMetadata, true).build()) .build(); + final Integer totalShardsPerNode = randomTotalShardsPerNode(false); + final int replicas = randomIntBetween(1, 5); + try (var threadPool = createThreadPool()) { final var client = getRestoreSnapshotRequestAssertingClient( threadPool, @@ -403,7 +428,9 @@ public void testDoNotIgnoreTotalShardsPerNodeIfSet() throws Exception { indexName, RESTORED_INDEX_PREFIX, indexName, - new String[] { LifecycleSettings.LIFECYCLE_NAME } + new String[] { LifecycleSettings.LIFECYCLE_NAME }, + totalShardsPerNode, + replicas ); MountSnapshotStep step = new MountSnapshotStep( new StepKey(TimeseriesLifecycleType.FROZEN_PHASE, randomAlphaOfLength(10), randomAlphaOfLength(10)), @@ -411,7 +438,8 @@ public void testDoNotIgnoreTotalShardsPerNodeIfSet() throws Exception { client, RESTORED_INDEX_PREFIX, randomStorageType(), - randomTotalShardsPerNode(false) + totalShardsPerNode, + replicas ); performActionAndWait(step, indexMetadata, clusterState, null); } @@ -439,7 +467,9 @@ private NoOpClient getRestoreSnapshotRequestAssertingClient( String indexName, String restoredIndexPrefix, String expectedSnapshotIndexName, - String[] expectedIgnoredIndexSettings + String[] expectedIgnoredIndexSettings, + @Nullable Integer totalShardsPerNode, + int replicas ) { return new NoOpClient(threadPool) { @Override @@ -462,6 +492,31 @@ protected void assertThat(mountSearchableSnapshotRequest.mountedIndexName(), is(restoredIndexPrefix + indexName)); assertThat(mountSearchableSnapshotRequest.snapshotIndexName(), is(expectedSnapshotIndexName)); + if (totalShardsPerNode != null) { + Integer totalShardsPerNodeSettingValue = ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get( + mountSearchableSnapshotRequest.indexSettings() + ); + assertThat(totalShardsPerNodeSettingValue, is(totalShardsPerNode)); + } else { + assertThat( + mountSearchableSnapshotRequest.indexSettings() 
+ .hasValue(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey()), + is(false) + ); + } + + if (replicas > 0) { + Integer numberOfReplicasSettingValue = IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get( + mountSearchableSnapshotRequest.indexSettings() + ); + assertThat(numberOfReplicasSettingValue, is(replicas)); + } else { + assertThat( + mountSearchableSnapshotRequest.indexSettings().hasValue(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()), + is(false) + ); + } + // invoke the awaiting listener with a very generic 'response', just to fulfill the contract listener.onResponse((Response) new RestoreSnapshotResponse((RestoreInfo) null)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java index ca219fdde3d57..5304b7885f96c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotActionTests.java @@ -14,6 +14,8 @@ import java.io.IOException; import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; import static org.elasticsearch.xpack.core.ilm.SearchableSnapshotAction.NAME; import static org.elasticsearch.xpack.core.ilm.SearchableSnapshotAction.TOTAL_SHARDS_PER_NODE; @@ -29,40 +31,23 @@ public void testToSteps() { StepKey nextStepKey = new StepKey(phase, randomAlphaOfLengthBetween(1, 5), randomAlphaOfLengthBetween(1, 5)); List steps = action.toSteps(null, phase, nextStepKey, null); - assertThat(steps.size(), is(action.isForceMergeIndex() ? 19 : 17)); - - List expectedSteps = action.isForceMergeIndex() - ? 
expectedStepKeysWithForceMerge(phase) - : expectedStepKeysNoForceMerge(phase); - - assertThat(steps.get(0).getKey(), is(expectedSteps.get(0))); - assertThat(steps.get(1).getKey(), is(expectedSteps.get(1))); - assertThat(steps.get(2).getKey(), is(expectedSteps.get(2))); - assertThat(steps.get(3).getKey(), is(expectedSteps.get(3))); - assertThat(steps.get(4).getKey(), is(expectedSteps.get(4))); - assertThat(steps.get(5).getKey(), is(expectedSteps.get(5))); - assertThat(steps.get(6).getKey(), is(expectedSteps.get(6))); - assertThat(steps.get(7).getKey(), is(expectedSteps.get(7))); - assertThat(steps.get(8).getKey(), is(expectedSteps.get(8))); - assertThat(steps.get(9).getKey(), is(expectedSteps.get(9))); - assertThat(steps.get(10).getKey(), is(expectedSteps.get(10))); - assertThat(steps.get(11).getKey(), is(expectedSteps.get(11))); - assertThat(steps.get(12).getKey(), is(expectedSteps.get(12))); - assertThat(steps.get(13).getKey(), is(expectedSteps.get(13))); - assertThat(steps.get(14).getKey(), is(expectedSteps.get(14))); - assertThat(steps.get(15).getKey(), is(expectedSteps.get(15))); - - if (action.isForceMergeIndex()) { - assertThat(steps.get(16).getKey(), is(expectedSteps.get(16))); - assertThat(steps.get(17).getKey(), is(expectedSteps.get(17))); - CreateSnapshotStep createSnapshotStep = (CreateSnapshotStep) steps.get(9); - assertThat(createSnapshotStep.getNextKeyOnIncomplete(), is(expectedSteps.get(8))); - validateWaitForDataTierStep(phase, steps, 10, 11); - } else { - CreateSnapshotStep createSnapshotStep = (CreateSnapshotStep) steps.get(7); - assertThat(createSnapshotStep.getNextKeyOnIncomplete(), is(expectedSteps.get(6))); - validateWaitForDataTierStep(phase, steps, 8, 9); + + List expectedSteps = expectedStepKeys(phase, action.isForceMergeIndex()); + assertThat(steps.size(), is(expectedSteps.size())); + for (int i = 0; i < expectedSteps.size(); i++) { + assertThat("steps match expectation at index " + i, steps.get(i).getKey(), is(expectedSteps.get(i))); + } 
+ + int index = -1; + for (int i = 0; i < expectedSteps.size(); i++) { + if (expectedSteps.get(i).name().equals(CreateSnapshotStep.NAME)) { + index = i; + break; + } } + CreateSnapshotStep createSnapshotStep = (CreateSnapshotStep) steps.get(index); + assertThat(createSnapshotStep.getNextKeyOnIncomplete(), is(expectedSteps.get(index - 1))); + validateWaitForDataTierStep(phase, steps, index + 1, index + 2); } private void validateWaitForDataTierStep(String phase, List steps, int waitForDataTierStepIndex, int mountStepIndex) { @@ -108,15 +93,15 @@ public void testCreateWithInvalidTotalShardsPerNode() { assertEquals("[" + TOTAL_SHARDS_PER_NODE.getPreferredName() + "] must be >= 1", exception.getMessage()); } - private List expectedStepKeysWithForceMerge(String phase) { - return List.of( + private List expectedStepKeys(String phase, boolean forceMergeIndex) { + return Stream.of( new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_SKIP_ACTION_STEP), new StepKey(phase, NAME, CheckNotDataStreamWriteIndexStep.NAME), new StepKey(phase, NAME, WaitForNoFollowersStep.NAME), new StepKey(phase, NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME), new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_SKIP_GENERATE_AND_CLEAN), - new StepKey(phase, NAME, ForceMergeStep.NAME), - new StepKey(phase, NAME, SegmentCountStep.NAME), + forceMergeIndex ? new StepKey(phase, NAME, ForceMergeStep.NAME) : null, + forceMergeIndex ? 
new StepKey(phase, NAME, SegmentCountStep.NAME) : null, new StepKey(phase, NAME, GenerateSnapshotNameStep.NAME), new StepKey(phase, NAME, CleanupSnapshotStep.NAME), new StepKey(phase, NAME, CreateSnapshotStep.NAME), @@ -129,29 +114,7 @@ private List expectedStepKeysWithForceMerge(String phase) { new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME), new StepKey(phase, NAME, DeleteStep.NAME), new StepKey(phase, NAME, SwapAliasesAndDeleteSourceIndexStep.NAME) - ); - } - - private List expectedStepKeysNoForceMerge(String phase) { - return List.of( - new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_SKIP_ACTION_STEP), - new StepKey(phase, NAME, CheckNotDataStreamWriteIndexStep.NAME), - new StepKey(phase, NAME, WaitForNoFollowersStep.NAME), - new StepKey(phase, NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME), - new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_SKIP_GENERATE_AND_CLEAN), - new StepKey(phase, NAME, GenerateSnapshotNameStep.NAME), - new StepKey(phase, NAME, CleanupSnapshotStep.NAME), - new StepKey(phase, NAME, CreateSnapshotStep.NAME), - new StepKey(phase, NAME, WaitForDataTierStep.NAME), - new StepKey(phase, NAME, MountSnapshotStep.NAME), - new StepKey(phase, NAME, WaitForIndexColorStep.NAME), - new StepKey(phase, NAME, CopyExecutionStateStep.NAME), - new StepKey(phase, NAME, CopySettingsStep.NAME), - new StepKey(phase, NAME, SearchableSnapshotAction.CONDITIONAL_DATASTREAM_CHECK_KEY), - new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME), - new StepKey(phase, NAME, DeleteStep.NAME), - new StepKey(phase, NAME, SwapAliasesAndDeleteSourceIndexStep.NAME) - ); + ).filter(Objects::nonNull).toList(); } @Override @@ -172,8 +135,16 @@ protected Writeable.Reader instanceReader() { @Override protected SearchableSnapshotAction mutateInstance(SearchableSnapshotAction instance) { return switch (randomIntBetween(0, 2)) { - case 0 -> new SearchableSnapshotAction(randomAlphaOfLengthBetween(5, 10), 
instance.isForceMergeIndex()); - case 1 -> new SearchableSnapshotAction(instance.getSnapshotRepository(), instance.isForceMergeIndex() == false); + case 0 -> new SearchableSnapshotAction( + randomAlphaOfLengthBetween(5, 10), + instance.isForceMergeIndex(), + instance.getTotalShardsPerNode() + ); + case 1 -> new SearchableSnapshotAction( + instance.getSnapshotRepository(), + instance.isForceMergeIndex() == false, + instance.getTotalShardsPerNode() + ); case 2 -> new SearchableSnapshotAction( instance.getSnapshotRepository(), instance.isForceMergeIndex(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStepTests.java index 8ca6c0016a791..15bbbe7446429 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitUntilTimeSeriesEndTimePassesStepTests.java @@ -30,7 +30,7 @@ public class WaitUntilTimeSeriesEndTimePassesStepTests extends AbstractStepTestC protected WaitUntilTimeSeriesEndTimePassesStep createRandomInstance() { Step.StepKey stepKey = randomStepKey(); Step.StepKey nextStepKey = randomStepKey(); - return new WaitUntilTimeSeriesEndTimePassesStep(stepKey, nextStepKey, Instant::now, client); + return new WaitUntilTimeSeriesEndTimePassesStep(stepKey, nextStepKey, Instant::now); } @Override @@ -42,12 +42,12 @@ protected WaitUntilTimeSeriesEndTimePassesStep mutateInstance(WaitUntilTimeSerie case 0 -> key = new Step.StepKey(key.phase(), key.action(), key.name() + randomAlphaOfLength(5)); case 1 -> nextKey = new Step.StepKey(nextKey.phase(), nextKey.action(), nextKey.name() + randomAlphaOfLength(5)); } - return new WaitUntilTimeSeriesEndTimePassesStep(key, nextKey, Instant::now, client); + return new WaitUntilTimeSeriesEndTimePassesStep(key, nextKey, 
Instant::now); } @Override protected WaitUntilTimeSeriesEndTimePassesStep copyInstance(WaitUntilTimeSeriesEndTimePassesStep instance) { - return new WaitUntilTimeSeriesEndTimePassesStep(instance.getKey(), instance.getNextStepKey(), Instant::now, client); + return new WaitUntilTimeSeriesEndTimePassesStep(instance.getKey(), instance.getNextStepKey(), Instant::now); } public void testEvaluateCondition() { @@ -68,8 +68,7 @@ public void testEvaluateCondition() { WaitUntilTimeSeriesEndTimePassesStep step = new WaitUntilTimeSeriesEndTimePassesStep( randomStepKey(), randomStepKey(), - () -> currentTime, - client + () -> currentTime ); { // end_time has lapsed already so condition must be met From dcadb08b57bc65ac5f989ad60e18eac9fcce42c4 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 13 Dec 2024 13:37:07 -0600 Subject: [PATCH 30/32] Unmuting XPackRestTest migrate reindex tests (#118407) These were fixed by https://github.com/elastic/elasticsearch/pull/118382 Closes #118401 Closes #118272 Closes #118273 Closes #118274 --- muted-tests.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index e0d011c1bf239..1255739e818be 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -294,18 +294,6 @@ tests: - class: org.elasticsearch.docker.test.DockerYmlTestSuiteIT method: test {p0=/11_nodes/Test cat nodes output} issue: https://github.com/elastic/elasticsearch/issues/118397 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/20_reindex_status/Test get reindex status with nonexistent task id} - issue: https://github.com/elastic/elasticsearch/issues/118401 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Nonexistent Data Stream} - issue: https://github.com/elastic/elasticsearch/issues/118274 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Bad Data Stream Name} - issue: 
https://github.com/elastic/elasticsearch/issues/118272 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=migrate/10_reindex/Test Reindex With Unsupported Mode} - issue: https://github.com/elastic/elasticsearch/issues/118273 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT method: testEveryActionIsEitherOperatorOnlyOrNonOperator issue: https://github.com/elastic/elasticsearch/issues/118220 From 908cf9ab768c4854e51926f28b62fd2108c69aa7 Mon Sep 17 00:00:00 2001 From: Quentin Deschamps Date: Fri, 13 Dec 2024 20:54:48 +0100 Subject: [PATCH 31/32] Fix moving function linear weighted avg (#118516) Fix moving function linear weighted avg --- docs/changelog/118516.yaml | 6 ++++ modules/aggregations/build.gradle | 1 + .../test/aggregations/moving_fn.yml | 31 +++++++++++++------ .../action/search/SearchCapabilities.java | 3 ++ .../pipeline/MovingFunctions.java | 4 +-- .../MovFnWhitelistedFunctionTests.java | 2 +- 6 files changed, 34 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/118516.yaml diff --git a/docs/changelog/118516.yaml b/docs/changelog/118516.yaml new file mode 100644 index 0000000000000..8a618a6d6cfd7 --- /dev/null +++ b/docs/changelog/118516.yaml @@ -0,0 +1,6 @@ +pr: 118435 +summary: Fix moving function linear weighted avg +area: Aggregations +type: bug +issues: + - 113751 diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 94fdddf6d711a..3b5fb6ddecde9 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -48,4 +48,5 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("aggregations/date_agg_per_day_of_week/Date aggregartion per day of week", "week-date behaviour has changed") task.skipTest("aggregations/time_series/Configure with no synthetic source", "temporary until backport") task.skipTest("aggregations/percentiles_hdr_metric/Negative values test", "returned exception has changed") + 
task.skipTest("aggregations/moving_fn/linearWeightedAvg", "math was wrong in previous versions") }) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/moving_fn.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/moving_fn.yml index cd6feb601b1df..3abad87d57907 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/moving_fn.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/moving_fn.yml @@ -255,6 +255,17 @@ linearWeightedAvg: - skip: features: close_to + - requires: + test_runner_features: [capabilities] + + - requires: + capabilities: + - method: POST + path: /_search + parameters: [method, path, parameters, capabilities] + capabilities: [moving_fn_right_math] + reason: "math not fixed yet" + - do: search: index: no_gaps @@ -275,11 +286,11 @@ linearWeightedAvg: - match: { hits.total.value: 6 } - length: { aggregations.@timestamp.buckets: 6 } - is_false: aggregations.@timestamp.buckets.0.d.value - - close_to: { aggregations.@timestamp.buckets.1.d.value: { value: 0.500, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.2.d.value: { value: 1.250, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.3.d.value: { value: 1.000, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.4.d.value: { value: 2.250, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.5.d.value: { value: 3.500, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.1.d.value: { value: 1.000, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.2.d.value: { value: 1.667, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.3.d.value: { value: 1.333, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.4.d.value: { value: 3.000, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.5.d.value: { value: 4.667, error: 0.0005 } } - do: 
search: @@ -301,11 +312,11 @@ linearWeightedAvg: - match: { hits.total.value: 6 } - length: { aggregations.@timestamp.buckets: 6 } - is_false: aggregations.@timestamp.buckets.0.d.value - - close_to: { aggregations.@timestamp.buckets.1.d.value: { value: 0.500, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.2.d.value: { value: 1.250, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.3.d.value: { value: 1.143, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.4.d.value: { value: 2.286, error: 0.0005 } } - - close_to: { aggregations.@timestamp.buckets.5.d.value: { value: 3.429, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.1.d.value: { value: 1.000, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.2.d.value: { value: 1.667, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.3.d.value: { value: 1.333, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.4.d.value: { value: 2.667, error: 0.0005 } } + - close_to: { aggregations.@timestamp.buckets.5.d.value: { value: 4.000, error: 0.0005 } } --- ewma: diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 86304c8c4bde2..06f8f8f3c1be6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -42,6 +42,8 @@ private SearchCapabilities() {} private static final String RANK_VECTORS_SCRIPT_ACCESS = "rank_vectors_script_access"; /** Initial support for rank-vectors maxSim functions access. */ private static final String RANK_VECTORS_SCRIPT_MAX_SIM = "rank_vectors_script_max_sim_with_bugfix"; + /** Fixed the math in {@code moving_fn}'s {@code linearWeightedAvg}. 
*/ + private static final String MOVING_FN_RIGHT_MATH = "moving_fn_right_math"; private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; private static final String OPTIMIZED_SCALAR_QUANTIZATION_BBQ = "optimized_scalar_quantization_bbq"; @@ -59,6 +61,7 @@ private SearchCapabilities() {} capabilities.add(RANDOM_SAMPLER_WITH_SCORED_SUBAGGS); capabilities.add(OPTIMIZED_SCALAR_QUANTIZATION_BBQ); capabilities.add(KNN_QUANTIZED_VECTOR_RESCORE); + capabilities.add(MOVING_FN_RIGHT_MATH); if (RankVectorsFieldMapper.FEATURE_FLAG.isEnabled()) { capabilities.add(RANK_VECTORS_FIELD_MAPPER); capabilities.add(RANK_VECTORS_SCRIPT_ACCESS); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java index 02e3c76e5e793..46584c171d16c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java @@ -100,7 +100,7 @@ public static double stdDev(double[] values, double avg) { */ public static double linearWeightedAvg(double[] values) { double avg = 0; - long totalWeight = 1; + long totalWeight = 0; long current = 1; for (double v : values) { @@ -110,7 +110,7 @@ public static double linearWeightedAvg(double[] values) { current += 1; } } - return totalWeight == 1 ? Double.NaN : avg / totalWeight; + return totalWeight == 0 ? 
Double.NaN : avg / totalWeight; } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java index 69173957aebab..3bc458880db0a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java @@ -326,7 +326,7 @@ public void testLinearMovAvg() { } double avg = 0; - long totalWeight = 1; + long totalWeight = 0; long current = 1; for (double value : window) { From b4610c8e26f292f73545b659a0bcdba7022ad1d0 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 13 Dec 2024 21:28:11 +0100 Subject: [PATCH 32/32] Remove needless sending of OriginalIndices in SearchFreeContextRequest (#117245) We don't need to use this request, the handler for freeing of scroll requests literally goes to the same transport handler and doesn't come with the list of indices. The original security need for keeping the list of indices around is long gone. 
--- .../action/IndicesRequestIT.java | 13 +--- .../search/AbstractSearchAsyncAction.java | 6 +- .../action/search/DfsQueryPhase.java | 6 +- .../action/search/SearchPhase.java | 6 +- .../action/search/SearchTransportService.java | 70 ++----------------- .../AbstractSearchAsyncActionTests.java | 6 +- .../action/search/MockSearchPhaseContext.java | 2 +- .../action/search/SearchAsyncActionTests.java | 15 +++- .../test/ESSingleNodeTestCase.java | 4 +- .../xpack/security/authz/RBACEngine.java | 26 +++---- 10 files changed, 44 insertions(+), 110 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 8bedf436e3698..f5860cedcd989 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -556,11 +556,7 @@ public void testUpdateSettings() { } public void testSearchQueryThenFetch() throws Exception { - interceptTransportActions( - SearchTransportService.QUERY_ACTION_NAME, - SearchTransportService.FETCH_ID_ACTION_NAME, - SearchTransportService.FREE_CONTEXT_ACTION_NAME - ); + interceptTransportActions(SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME); String[] randomIndicesOrAliases = randomIndicesOrAliases(); for (int i = 0; i < randomIndicesOrAliases.length; i++) { @@ -580,16 +576,13 @@ public void testSearchQueryThenFetch() throws Exception { SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME ); - // free context messages are not necessarily sent, but if they are, check their indices - assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } public void testSearchDfsQueryThenFetch() throws Exception { interceptTransportActions( 
SearchTransportService.DFS_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME, - SearchTransportService.FETCH_ID_ACTION_NAME, - SearchTransportService.FREE_CONTEXT_ACTION_NAME + SearchTransportService.FETCH_ID_ACTION_NAME ); String[] randomIndicesOrAliases = randomIndicesOrAliases(); @@ -611,8 +604,6 @@ public void testSearchDfsQueryThenFetch() throws Exception { SearchTransportService.QUERY_ID_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME ); - // free context messages are not necessarily sent, but if they are, check their indices - assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } private static void assertSameIndices(IndicesRequest originalRequest, String... actions) { diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 800193e258dba..47abfe266c524 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -711,7 +711,7 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { try { SearchShardTarget searchShardTarget = entry.getSearchShardTarget(); Transport.Connection connection = getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - sendReleaseSearchContext(entry.getContextId(), connection, getOriginalIndices(entry.getShardIndex())); + sendReleaseSearchContext(entry.getContextId(), connection); } catch (Exception inner) { inner.addSuppressed(exception); logger.trace("failed to release context", inner); @@ -727,10 +727,10 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { * @see org.elasticsearch.search.fetch.FetchSearchResult#getContextId() * */ - void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection 
connection, OriginalIndices originalIndices) { + void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection) { assert isPartOfPointInTime(contextId) == false : "Must not release point in time context [" + contextId + "]"; if (connection != null) { - searchTransportService.sendFreeContext(connection, contextId, originalIndices); + searchTransportService.sendFreeContext(connection, contextId, ActionListener.noop()); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 285dd0a22fd7e..cc8c4becea9a9 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -119,11 +119,7 @@ public void onFailure(Exception exception) { // the query might not have been executed at all (for example because thread pool rejected // execution) and the search context that was created in dfs phase might not be released. 
// release it again to be in the safe side - context.sendReleaseSearchContext( - querySearchRequest.contextId(), - connection, - context.getOriginalIndices(shardIndex) - ); + context.sendReleaseSearchContext(querySearchRequest.contextId(), connection); } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 986f7210c0d1b..7d849a72abf9d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -97,11 +97,7 @@ protected static void releaseIrrelevantSearchContext(SearchPhaseResult searchPha context.getLogger().trace("trying to release search context [{}]", phaseResult.getContextId()); SearchShardTarget shardTarget = phaseResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); - context.sendReleaseSearchContext( - phaseResult.getContextId(), - connection, - context.getOriginalIndices(phaseResult.getShardIndex()) - ); + context.sendReleaseSearchContext(phaseResult.getContextId(), connection); } catch (Exception e) { context.getLogger().trace("failed to release context", e); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 8444a92b24432..cfc2e1bcdaf2b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -13,12 +13,10 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import 
org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.support.ChannelActionListener; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -124,24 +122,6 @@ public SearchTransportService( this.responseWrapper = responseWrapper; } - private static final ActionListenerResponseHandler SEND_FREE_CONTEXT_LISTENER = - new ActionListenerResponseHandler<>( - ActionListener.noop(), - SearchFreeContextResponse::readFrom, - TransportResponseHandler.TRANSPORT_WORKER - ); - - public void sendFreeContext(Transport.Connection connection, final ShardSearchContextId contextId, OriginalIndices originalIndices) { - transportService.sendRequest( - connection, - FREE_CONTEXT_ACTION_NAME, - new SearchFreeContextRequest(originalIndices, contextId), - TransportRequestOptions.EMPTY, - // no need to respond if it was freed or not - SEND_FREE_CONTEXT_LISTENER - ); - } - public void sendFreeContext( Transport.Connection connection, ShardSearchContextId contextId, @@ -370,43 +350,6 @@ private static class ClearScrollContextsRequest extends TransportRequest { } } - static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest { - private final OriginalIndices originalIndices; - - SearchFreeContextRequest(OriginalIndices originalIndices, ShardSearchContextId id) { - super(id); - this.originalIndices = originalIndices; - } - - SearchFreeContextRequest(StreamInput in) throws IOException { - super(in); - originalIndices = OriginalIndices.readOriginalIndices(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - OriginalIndices.writeOriginalIndices(originalIndices, out); - } - - @Override - public 
String[] indices() { - if (originalIndices == null) { - return null; - } - return originalIndices.indices(); - } - - @Override - public IndicesOptions indicesOptions() { - if (originalIndices == null) { - return null; - } - return originalIndices.indicesOptions(); - } - - } - public static class SearchFreeContextResponse extends TransportResponse { private static final SearchFreeContextResponse FREED = new SearchFreeContextResponse(true); @@ -456,12 +399,13 @@ public static void registerRequestHandler(TransportService transportService, Sea SearchFreeContextResponse::readFrom ); - transportService.registerRequestHandler( - FREE_CONTEXT_ACTION_NAME, - freeContextExecutor, - SearchFreeContextRequest::new, - freeContextHandler - ); + // TODO: remove this handler once the lowest compatible version stops using it + transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, freeContextExecutor, in -> { + var res = new ScrollFreeContextRequest(in); + // this handler exists for BwC purposes only, we don't need the original indices to free the context + OriginalIndices.readOriginalIndices(in); + return res; + }, freeContextHandler); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, false, SearchFreeContextResponse::readFrom); transportService.registerRequestHandler( diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 725a4583d104a..71bf2a47cfa47 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -112,11 +112,7 @@ long buildTookInMillis() { } @Override - public void sendReleaseSearchContext( - ShardSearchContextId contextId, - Transport.Connection connection, - OriginalIndices originalIndices - ) { + public void 
sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection) { releasedContexts.add(contextId); } diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 7a38858d8477a..cf65d756811ad 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -155,7 +155,7 @@ protected void executePhaseOnShard( } @Override - public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection, OriginalIndices originalIndices) { + public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection) { releasedSearchContexts.add(contextId); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index b4ddd48172d01..2361beb7ad036 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -296,7 +296,11 @@ public void testFanOutAndCollect() throws InterruptedException { AtomicInteger numFreedContext = new AtomicInteger(); SearchTransportService transportService = new SearchTransportService(null, null, null) { @Override - public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { numFreedContext.incrementAndGet(); assertTrue(nodeToContextMap.containsKey(connection.getNode())); assertTrue(nodeToContextMap.get(connection.getNode()).remove(contextId)); @@ -363,7 +367,7 @@ public void run() { for (int i = 0; i < 
results.getNumShards(); i++) { TestSearchPhaseResult result = results.getAtomicArray().get(i); assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); - sendReleaseSearchContext(result.getContextId(), new MockConnection(result.node), OriginalIndices.NONE); + sendReleaseSearchContext(result.getContextId(), new MockConnection(result.node)); } responseListener.onResponse(testResponse); if (latchTriggered.compareAndSet(false, true) == false) { @@ -421,8 +425,13 @@ public void testFanOutAndFail() throws InterruptedException { ); AtomicInteger numFreedContext = new AtomicInteger(); SearchTransportService transportService = new SearchTransportService(null, null, null) { + @Override - public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext( + Transport.Connection connection, + ShardSearchContextId contextId, + ActionListener listener + ) { assertNotNull(contextId); numFreedContext.incrementAndGet(); assertTrue(nodeToContextMap.containsKey(connection.getNode())); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 459d5573d7c12..63334bd70306f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -78,7 +78,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.action.search.SearchTransportService.FREE_CONTEXT_ACTION_NAME; +import static org.elasticsearch.action.search.SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; import static 
org.elasticsearch.test.NodeRoles.dataNode; @@ -482,7 +482,7 @@ protected void ensureNoInitializingShards() { */ protected void ensureAllFreeContextActionsAreConsumed() throws Exception { logger.info("--> waiting for all free_context tasks to complete within a reasonable time"); - safeGet(clusterAdmin().prepareListTasks().setActions(FREE_CONTEXT_ACTION_NAME + "*").setWaitForCompletion(true).execute()); + safeGet(clusterAdmin().prepareListTasks().setActions(FREE_CONTEXT_SCROLL_ACTION_NAME + "*").setWaitForCompletion(true).execute()); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index fa6187798da25..2353d710059ff 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -129,6 +129,19 @@ public class RBACEngine implements AuthorizationEngine { private static final String DELETE_SUB_REQUEST_REPLICA = TransportDeleteAction.NAME + "[r]"; private static final Logger logger = LogManager.getLogger(RBACEngine.class); + + private static final Set SCROLL_RELATED_ACTIONS = Set.of( + TransportSearchScrollAction.TYPE.name(), + SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME, + SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME, + SearchTransportService.QUERY_SCROLL_ACTION_NAME, + SearchTransportService.FREE_CONTEXT_ACTION_NAME, + SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME, + TransportClearScrollAction.NAME, + "indices:data/read/sql/close_cursor", + SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME + ); + private final Settings settings; private final CompositeRolesStore rolesStore; private final FieldPermissionsCache fieldPermissionsCache; @@ -320,7 +333,7 @@ public void authorizeIndexAction( // need to validate that the action is allowed and then move 
on listener.onResponse(role.checkIndicesAction(action) ? IndexAuthorizationResult.EMPTY : IndexAuthorizationResult.DENIED); } else if (request instanceof IndicesRequest == false) { - if (isScrollRelatedAction(action)) { + if (SCROLL_RELATED_ACTIONS.contains(action)) { // scroll is special // some APIs are indices requests that are not actually associated with indices. For example, // search scroll request, is categorized under the indices context, but doesn't hold indices names @@ -1000,17 +1013,6 @@ public int hashCode() { } } - private static boolean isScrollRelatedAction(String action) { - return action.equals(TransportSearchScrollAction.TYPE.name()) - || action.equals(SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME) - || action.equals(SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME) - || action.equals(SearchTransportService.QUERY_SCROLL_ACTION_NAME) - || action.equals(SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME) - || action.equals(TransportClearScrollAction.NAME) - || action.equals("indices:data/read/sql/close_cursor") - || action.equals(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); - } - private static boolean isAsyncRelatedAction(String action) { return action.equals(SubmitAsyncSearchAction.NAME) || action.equals(GetAsyncSearchAction.NAME)