From 00ca64bcf23a6dad9074e62f0a5d57af801272b0 Mon Sep 17 00:00:00 2001
From: Moritz Mack
Date: Fri, 12 Jan 2024 10:30:04 +0100
Subject: [PATCH 01/35] Use allow-list for APM agent settings and consolidate
 defaults in APMJvmOptions (#104141)

Prevent invalid configuration and misconfiguration of the APM agent using an
explicit allow-list of setting keys. Additionally, configuration defaults of
APMAgentSettings are consolidated in APMJvmOptions to keep defaults in a
single location. (ES-6916)
---
 .../gradle/testclusters/RunTask.java          |   6 +-
 .../server/cli/APMJvmOptions.java             |   3 +-
 .../apm/internal/APMAgentSettings.java        | 162 ++++++++++++++----
 .../apm/internal/APMAgentSettingsTests.java   |  43 ++---
 4 files changed, 145 insertions(+), 69 deletions(-)

diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
index ca2cbc09f7c2f..746a09d242761 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
@@ -201,7 +201,7 @@ public void beforeStart() {
             try {
                 mockServer.start();
                 node.setting("telemetry.metrics.enabled", "true");
-                node.setting("tracing.apm.agent.enabled", "true");
+                node.setting("tracing.apm.enabled", "true");
                 node.setting("tracing.apm.agent.transaction_sample_rate", "0.10");
                 node.setting("tracing.apm.agent.metrics_interval", "10s");
                 node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort());
@@ -213,8 +213,8 @@ public void beforeStart() {
             // if metrics were not enabled explicitly for gradlew run we should disable them
             else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics
                 node.setting("telemetry.metrics.enabled", "false");
-            } else if (node.getSettingKeys().contains("tracing.apm.agent.enabled") == false) { // tracing
-                node.setting("tracing.apm.agent.enable", "false");
+            } else if (node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing
+                node.setting("tracing.apm.enable", "false");
             }
         }
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
index 9dcd630f52631..8531e22447a2d 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
@@ -79,7 +79,8 @@ class APMJvmOptions {
         "application_packages", "org.elasticsearch,org.apache.lucene",
         "metrics_interval", "120s",
         "breakdown_metrics", "false",
-        "central_config", "false"
+        "central_config", "false",
+        "transaction_sample_rate", "0.2"
     );
     // end::noformat
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
index 41816318a3586..0ee13dae70740 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java
@@ -22,8 +22,8 @@
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 import java.util.List;
-import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 
 import static org.elasticsearch.common.settings.Setting.Property.NodeScope;
 import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic;
@@ -36,17 +36,6 @@ public class APMAgentSettings {
 
     private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class);
 
-    /**
-     * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent
-     * config file, as then their values could not be overridden dynamically via system properties.
-     */
-    static Map<String, String> APM_AGENT_DEFAULT_SETTINGS = Map.of(
-        "transaction_sample_rate",
-        "0.2",
-        "enable_experimental_instrumentations",
-        "true"
-    );
-
     public void addClusterSettingsListeners(
         ClusterService clusterService,
         APMTelemetryProvider apmTelemetryProvider,
@@ -77,16 +66,7 @@ public void addClusterSettingsListeners(
      */
     public void syncAgentSystemProperties(Settings settings) {
         this.setAgentSetting("recording", Boolean.toString(APM_ENABLED_SETTING.get(settings)));
-
-        // Apply default values for some system properties. Although we configure
-        // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't
-        // do anything if those settings are never configured.
-        APM_AGENT_DEFAULT_SETTINGS.keySet()
-            .forEach(
-                key -> this.setAgentSetting(key, APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings))
-            );
-
-        // Then apply values from the settings in the cluster state
+        // Apply values from the settings in the cluster state
         APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting);
     }
 
@@ -114,15 +94,130 @@ public void setAgentSetting(String key, String value) {
     private static final String APM_SETTING_PREFIX = "tracing.apm.";
 
     /**
-     * A list of APM agent config keys that should never be configured by the user.
+     * Allow-list of APM agent config keys users are permitted to configure.
+     * @see APM Java Agent Configuration
      */
-    private static final List<String> PROHIBITED_AGENT_KEYS = List.of(
-        // ES generates a config file and sets this value
-        "config_file",
-        // ES controls this via `telemetry.metrics.enabled`
-        "recording",
-        // ES controls this via `apm.enabled`
-        "instrument"
+    private static final Set<String> PERMITTED_AGENT_KEYS = Set.of(
+        // Circuit-Breaker:
+        "circuit_breaker_enabled",
+        "stress_monitoring_interval",
+        "stress_monitor_gc_stress_threshold",
+        "stress_monitor_gc_relief_threshold",
+        "stress_monitor_cpu_duration_threshold",
+        "stress_monitor_system_cpu_stress_threshold",
+        "stress_monitor_system_cpu_relief_threshold",
+
+        // Core:
+        // forbid 'enabled', must remain enabled to dynamically enable tracing / metrics
+        // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'tracing.apm.enabled'
+        "service_name",
+        "service_node_name",
+        // forbid 'service_version', forced by APMJvmOptions
+        "hostname",
+        "environment",
+        "transaction_sample_rate",
+        "transaction_max_spans",
+        "long_field_max_length",
+        "sanitize_field_names",
+        "enable_instrumentations",
+        "disable_instrumentations",
+        // forbid 'enable_experimental_instrumentations', expected to be always enabled by APMJvmOptions
+        "unnest_exceptions",
+        "ignore_exceptions",
+        "capture_body",
+        "capture_headers",
+        "global_labels",
+        "instrument_ancient_bytecode",
+        "context_propagation_only",
+        "classes_excluded_from_instrumentation",
+        "trace_methods",
+        "trace_methods_duration_threshold",
+        // forbid 'central_config', may impact usage of config_file, disabled in APMJvmOptions
+        // forbid 'config_file', configured by APMJvmOptions
+        "breakdown_metrics",
+        "plugins_dir",
+        "use_elastic_traceparent_header",
+        "disable_outgoing_tracecontext_headers",
+        "span_min_duration",
+        "cloud_provider",
+        "enable_public_api_annotation_inheritance",
+        "transaction_name_groups",
+        "trace_continuation_strategy",
+        "baggage_to_attach",
+
+        // Datastore: irrelevant, not whitelisted
+
+        // HTTP:
+        "capture_body_content_types",
+        "transaction_ignore_urls",
+        "transaction_ignore_user_agents",
+        "use_path_as_transaction_name",
+        // forbid deprecated url_groups
+
+        // Huge Traces:
+        "span_compression_enabled",
+        "span_compression_exact_match_max_duration",
+        "span_compression_same_kind_max_duration",
+        "exit_span_min_duration",
+
+        // JAX-RS: irrelevant, not whitelisted
+
+        // JMX:
+        "capture_jmx_metrics",
+
+        // Logging:
+        "log_level", // allow overriding the default in APMJvmOptions
+        // forbid log_file, always set by APMJvmOptions
+        "log_ecs_reformatting",
+        "log_ecs_reformatting_additional_fields",
+        "log_ecs_formatter_allow_list",
+        // forbid log_ecs_reformatting_dir, always use logsDir provided in APMJvmOptions
+        "log_file_size",
+        // forbid log_format_sout, always use file logging
+        // forbid log_format_file, expected to be JSON in APMJvmOptions
+        "log_sending",
+
+        // Messaging: irrelevant, not whitelisted
+
+        // Metrics:
+        "dedot_custom_metrics",
+        "custom_metrics_histogram_boundaries",
+        "metric_set_limit",
+        "agent_reporter_health_metrics",
+        "agent_background_overhead_metrics",
+
+        // Profiling:
+        "profiling_inferred_spans_enabled",
+        "profiling_inferred_spans_logging_enabled",
+        "profiling_inferred_spans_sampling_interval",
+        "profiling_inferred_spans_min_duration",
+        "profiling_inferred_spans_included_classes",
+        "profiling_inferred_spans_excluded_classes",
+        "profiling_inferred_spans_lib_directory",
+
+        // Reporter:
+        // forbid secret_token: use tracing.apm.secret_token instead
+        // forbid api_key: use tracing.apm.api_key instead
+        "server_url",
+        "server_urls",
+        "disable_send",
+        "server_timeout",
+        "verify_server_cert",
+        "max_queue_size",
+        "include_process_args",
+        "api_request_time",
+        "api_request_size",
+        "metrics_interval",
+        "disable_metrics",
+
+        // Serverless:
+        "aws_lambda_handler",
+        "data_flush_timeout",
+
+        // Stacktraces:
+        "application_packages",
+        "stack_trace_limit",
+        "span_stack_trace_min_duration"
     );
 
     public static final Setting.AffixSetting<String> APM_AGENT_SETTINGS = Setting.prefixKeySetting(
@@ -130,10 +225,9 @@ public void setAgentSetting(String key, String value) {
         (qualifiedKey) -> {
             final String[] parts = qualifiedKey.split("\\.");
             final String key = parts[parts.length - 1];
-            final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, "");
-            return new Setting<>(qualifiedKey, defaultValue, (value) -> {
-                if (PROHIBITED_AGENT_KEYS.contains(key)) {
-                    throw new IllegalArgumentException("Explicitly configuring [" + qualifiedKey + "] is prohibited");
+            return new Setting<>(qualifiedKey, "", (value) -> {
+                if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(key) == false) {
+                    throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown.");
                 }
                 return value;
             }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic);
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
index b22a57bb9bf0c..7457b97eebdde 100644
--- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
+++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java
@@ -11,8 +11,8 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
 
+import static org.hamcrest.Matchers.containsString;
 import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
 public class APMAgentSettingsTests extends ESTestCase {
@@ -20,7 +20,7 @@ public class APMAgentSettingsTests extends ESTestCase {
     /**
      * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true.
     */
-    public void test_whenTracerEnabled_setsRecordingProperty() {
+    public void testEnableRecording() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build();
         apmAgentSettings.syncAgentSystemProperties(settings);
@@ -31,7 +31,7 @@ public void test_whenTracerEnabled_setsRecordingProperty() {
     /**
     * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false.
     */
-    public void test_whenTracerDisabled_setsRecordingProperty() {
+    public void testDisableRecording() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build();
         apmAgentSettings.syncAgentSystemProperties(settings);
@@ -40,48 +40,29 @@ public void test_whenTracerDisabled_setsRecordingProperty() {
     }
 
     /**
-     * Check that when cluster settings are synchronised with the system properties, default values are
-     * applied.
+     * Check that when cluster settings are synchronised with the system properties, agent settings are set.
      */
-    public void test_whenTracerCreated_defaultSettingsApplied() {
-        APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
-        Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build();
-        apmAgentSettings.syncAgentSystemProperties(settings);
-
-        verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.2");
-    }
-
-    /**
-     * Check that when cluster settings are synchronised with the system properties, values in the settings
-     * are reflected in the system properties, overwriting default values.
-     */
-    public void test_whenTracerCreated_clusterSettingsOverrideDefaults() {
+    public void testSetAgentSettings() {
         APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
         Settings settings = Settings.builder()
             .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true)
-            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75")
+            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true")
             .build();
         apmAgentSettings.syncAgentSystemProperties(settings);
 
-        // This happens twice because we first apply the default settings, whose values are overridden
-        // from the cluster settings, then we apply all the APM-agent related settings, not just the
-        // ones with default values. Although there is some redundancy here, it only happens at startup
-        // for a very small number of settings.
-        verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75");
+        verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true");
     }
 
     /**
-     * Check that when cluster settings are synchronised with the system properties, agent settings other
-     * than those with default values are set.
+     * Check that invalid or forbidden APM agent settings are rejected.
      */
-    public void test_whenTracerCreated_clusterSettingsAlsoApplied() {
-        APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
+    public void testRejectForbiddenOrUnknownSettings() {
         Settings settings = Settings.builder()
             .put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true)
-            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true")
+            .put(APMAgentSettings.APM_AGENT_SETTINGS.getKey() + "unknown", "true")
            .build();
-        apmAgentSettings.syncAgentSystemProperties(settings);
 
-        verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true");
+        Exception exception = expectThrows(IllegalArgumentException.class, () -> APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(settings));
+        assertThat(exception.getMessage(), containsString("[tracing.apm.agent.unknown]"));
     }
 }

From 1b2e8cf5a2b6faa8386905aaa42911bf20f2c9cb Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Fri, 12 Jan 2024 09:34:35 +0000
Subject: [PATCH 02/35] [ML] Refactor InferenceConfigUpdate for simpler
 changes to tokenization options (#104277)

---
 .../BertJapaneseTokenization.java | 5 ++
 .../trainedmodel/BertTokenization.java | 5 ++
 .../trainedmodel/ClassificationConfig.java | 27 +++++++
 .../ClassificationConfigUpdate.java | 41 ----------
 .../trainedmodel/EmptyConfigUpdate.java | 8 +-
 .../trainedmodel/FillMaskConfig.java | 22 +++++
 .../trainedmodel/FillMaskConfigUpdate.java | 35 --------
 .../trainedmodel/InferenceConfig.java | 18 +++++
 .../trainedmodel/InferenceConfigUpdate.java | 10 ++-
 .../trainedmodel/MPNetTokenization.java | 5 ++
 .../ml/inference/trainedmodel/NerConfig.java | 17 ++++
 .../trainedmodel/NerConfigUpdate.java | 23 ------
 .../trainedmodel/NullInferenceConfig.java | 5 ++
 .../trainedmodel/PassThroughConfig.java | 16 ++++
 .../trainedmodel/PassThroughConfigUpdate.java | 24 ------
 .../trainedmodel/QuestionAnsweringConfig.java | 26 ++++++
 .../QuestionAnsweringConfigUpdate.java | 32 --------
 .../trainedmodel/RegressionConfig.java | 18 +++++
 .../trainedmodel/RegressionConfigUpdate.java | 30 -------
 .../trainedmodel/ResultsFieldUpdate.java | 17 ----
 .../trainedmodel/RobertaTokenization.java | 12 +++
 .../TextClassificationConfig.java | 34 ++++++++
 .../TextClassificationConfigUpdate.java | 48 -----------
 .../trainedmodel/TextEmbeddingConfig.java | 17 ++++
 .../TextEmbeddingConfigUpdate.java | 23 ------
 .../trainedmodel/TextExpansionConfig.java | 16 ++++
 .../TextExpansionConfigUpdate.java | 28 -------
 .../trainedmodel/TextSimilarityConfig.java | 18 +++++
 .../TextSimilarityConfigUpdate.java | 26 +----
 .../inference/trainedmodel/Tokenization.java | 77 ++++++++++++----
 .../TokenizationConfigUpdate.java | 81 +++++++++++++++++++
 .../trainedmodel/XLMRobertaTokenization.java | 5 ++
 .../ZeroShotClassificationConfig.java | 37 +++++++++
 .../ZeroShotClassificationConfigUpdate.java | 40 ---------
 .../BertJapaneseTokenizationTests.java | 7 ++
 .../trainedmodel/BertTokenizationTests.java | 7 ++
 .../ClassificationConfigUpdateTests.java | 19 ++---
 .../FillMaskConfigUpdateTests.java | 34 ++------
 .../trainedmodel/NerConfigUpdateTests.java | 13 +--
 .../PassThroughConfigUpdateTests.java | 13 +--
 .../QuestionAnsweringConfigUpdateTests.java | 27 ++++---
 .../RegressionConfigUpdateTests.java | 11 ++-
 .../trainedmodel/ResultsFieldUpdateTests.java | 4 +-
 .../RobertaTokenizationTests.java | 7 ++
 .../TextClassificationConfigUpdateTests.java | 54 +++----------
 .../TextEmbeddingConfigUpdateTests.java | 13 +--
 .../TextSimilarityConfigUpdateTests.java | 18 ++--
 .../TokenizationConfigUpdateTests.java | 32 ++++++++
 ...roShotClassificationConfigUpdateTests.java | 46 +++--------
 .../TrainedModelDeploymentTask.java | 12 +--
 .../inference/loadingservice/LocalModel.java | 5 +-
 .../ZeroShotClassificationProcessorTests.java | 2 +-
 52 files changed, 614 insertions(+), 556 deletions(-)
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java
 create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
index 269d803a698bf..392258608acd1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
@@ -59,6 +59,11 @@ public BertJapaneseTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new BertJapaneseTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         return builder;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
index a950a18fcab2b..a229227df60e9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenization.java
@@ -60,6 +60,11 @@ public BertTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new BertTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
index 156fd76a9419c..cc66c361925a6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfig.java
@@ -109,6 +109,33 @@ public ClassificationConfig(StreamInput in) throws IOException {
         this.predictionFieldType = PredictionFieldType.fromStream(in);
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof ClassificationConfigUpdate configUpdate) {
+            ClassificationConfig.Builder builder = new ClassificationConfig.Builder(this);
+            if (configUpdate.getResultsField() != null) {
builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.getNumTopFeatureImportanceValues() != null) { + builder.setNumTopFeatureImportanceValues(configUpdate.getNumTopFeatureImportanceValues()); + } + if (configUpdate.getTopClassesResultsField() != null) { + builder.setTopClassesResultsField(configUpdate.getTopClassesResultsField()); + } + if (configUpdate.getNumTopClasses() != null) { + builder.setNumTopClasses(configUpdate.getNumTopClasses()); + } + if (configUpdate.getPredictionFieldType() != null) { + builder.setPredictionFieldType(configUpdate.getPredictionFieldType()); + } + return builder.build(); + } else if (update instanceof ResultsFieldUpdate resultsFieldUpdate) { + return new ClassificationConfig.Builder(this).setResultsField(resultsFieldUpdate.getResultsField()).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + public int getNumTopClasses() { return numTopClasses; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java index a036427abbe48..de4004792af7c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java @@ -203,52 +203,11 @@ public String getName() { return NAME.getPreferredName(); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof ClassificationConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - ClassificationConfig classificationConfig = (ClassificationConfig) originalConfig; - - if (isNoop(classificationConfig)) { - return originalConfig; - } - ClassificationConfig.Builder builder = new ClassificationConfig.Builder(classificationConfig); - if (resultsField != null) { - builder.setResultsField(resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues); - } - if (topClassesResultsField != null) { - builder.setTopClassesResultsField(topClassesResultsField); - } - if (numTopClasses != null) { - builder.setNumTopClasses(numTopClasses); - } - if (predictionFieldType != null) { - builder.setPredictionFieldType(predictionFieldType); - } - return builder.build(); - } - @Override public boolean isSupported(InferenceConfig inferenceConfig) { return inferenceConfig instanceof ClassificationConfig; } - boolean isNoop(ClassificationConfig originalConfig) { - return (resultsField == null || resultsField.equals(originalConfig.getResultsField())) - && (numTopFeatureImportanceValues == null || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues) - && (topClassesResultsField == null || topClassesResultsField.equals(originalConfig.getTopClassesResultsField())) - && (numTopClasses == null || originalConfig.getNumTopClasses() == numTopClasses) - && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType())); - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_7_8_0; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java index c098b13fd1deb..feb3a2e3191ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java @@ -30,13 +30,13 @@ public EmptyConfigUpdate() {} public EmptyConfigUpdate(StreamInput in) {} @Override - public String getResultsField() { - return null; + public boolean isEmpty() { + return true; } @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - return originalConfig; + public String getResultsField() { + return null; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java index 24b7a95c9ccac..ab45c2f420bd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfig.java @@ -99,6 +99,28 @@ public FillMaskConfig(StreamInput in) throws IOException { resultsField = in.readOptionalString(); } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof FillMaskConfigUpdate configUpdate) { + FillMaskConfig.Builder builder = new FillMaskConfig.Builder(this); + if (configUpdate.getNumTopClasses() != null) { + builder.setNumTopClasses(configUpdate.getNumTopClasses()); + } + if (configUpdate.getResultsField() != null) { + builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.getTokenizationUpdate() != null) { + builder.setTokenization(configUpdate.getTokenizationUpdate().apply(this.getTokenization())); + } + return builder.build(); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + FillMaskConfig.Builder builder = new FillMaskConfig.Builder(this); + return builder.setTokenization(this.getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings())).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java index cb081aa48d0a2..9ac4ea9cf18e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java @@ -108,41 +108,6 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_0_0; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof FillMaskConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - FillMaskConfig fillMaskConfig 
= (FillMaskConfig) originalConfig; - if (isNoop(fillMaskConfig)) { - return originalConfig; - } - - FillMaskConfig.Builder builder = new FillMaskConfig.Builder(fillMaskConfig); - if (numTopClasses != null) { - builder.setNumTopClasses(numTopClasses); - } - if (resultsField != null) { - builder.setResultsField(resultsField); - } - if (tokenizationUpdate != null) { - builder.setTokenization(tokenizationUpdate.apply(fillMaskConfig.getTokenization())); - - } - return builder.build(); - } - - boolean isNoop(FillMaskConfig originalConfig) { - return (this.numTopClasses == null || this.numTopClasses == originalConfig.getNumTopClasses()) - && (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof FillMaskConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java index 2b043cf022a3d..8733e456157d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java @@ -6,10 +6,12 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ml.MlConfigVersion; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; public interface InferenceConfig extends NamedXContentObject, VersionedNamedWriteable { @@ -20,6 +22,14 @@ public interface InferenceConfig extends NamedXContentObject, VersionedNamedWrit boolean isTargetTypeSupported(TargetType targetType); + /** + * Return a copy of this with the settings updated by the + * values in {@code update}. 
+     * @param update The update to apply
+     * @return A new updated config
+     */
+    InferenceConfig apply(InferenceConfigUpdate update);
+
     @Override
     default TransportVersion getMinimalSupportedVersion() {
         return getMinimalSupportedTransportVersion();
     }
@@ -54,4 +64,12 @@ default boolean supportsPipelineAggregation() {
     default boolean supportsSearchRescorer() {
         return false;
     }
+
+    default ElasticsearchStatusException incompatibleUpdateException(String updateName) {
+        throw ExceptionsHelper.badRequestException(
+            "Inference config of type [{}] can not be updated with an inference request of type [{}]",
+            getName(),
+            updateName
+        );
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
index 30ecac00a3b80..50d516378e92d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java
@@ -20,10 +20,16 @@ public interface InferenceConfigUpdate extends VersionedNamedWriteable {
         Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), TrainedModelConfig.MODEL_ID.getPreferredName())
     );
 
-    InferenceConfig apply(InferenceConfig originalConfig);
-
     boolean isSupported(InferenceConfig config);
 
+    /**
+     * Is this an empty update.
+     * @return True if empty
+     */
+    default boolean isEmpty() {
+        return false;
+    }
+
     String getResultsField();
 
     interface Builder<T extends Builder<T, U>, U extends InferenceConfigUpdate> {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
index e18a1d056f57c..9e599eb86b8ad 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/MPNetTokenization.java
@@ -59,6 +59,11 @@ public MPNetTokenization(StreamInput in) throws IOException {
         super(in);
     }
 
+    @Override
+    Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) {
+        return new MPNetTokenization(this.doLowerCase, this.withSpecialTokens, updatedMaxSeqLength, Truncate.NONE, updatedSpan);
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
index e7f3a66b6748f..b87e7e7edbb71 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
@@ -157,6 +157,23 @@ public boolean isTargetTypeSupported(TargetType targetType) {
         return false;
     }
 
+    @Override
+    public InferenceConfig apply(InferenceConfigUpdate update) {
+        if (update instanceof NerConfigUpdate configUpdate) {
+            return new NerConfig(
+                vocabularyConfig,
+                (configUpdate.getTokenizationUpdate() == null) ?
tokenization : configUpdate.getTokenizationUpdate().apply(tokenization), + classificationLabels, + Optional.ofNullable(update.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new NerConfig(this.vocabularyConfig, updatedTokenization, this.classificationLabels, this.resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java index 884ecb39df448..015aa658b1658 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -92,28 +91,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof NerConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - NerConfig nerConfig = (NerConfig) originalConfig; - if (isNoop(nerConfig)) { - return nerConfig; - } - - return new NerConfig( - nerConfig.getVocabularyConfig(), - (tokenizationUpdate == null) ? 
nerConfig.getTokenization() : tokenizationUpdate.apply(nerConfig.getTokenization()), - nerConfig.getClassificationLabels(), - Optional.ofNullable(resultsField).orElse(nerConfig.getResultsField()) - ); - } - boolean isNoop(NerConfig originalConfig) { return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java index 67d0edb880a66..dae96dc9a684c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NullInferenceConfig.java @@ -29,6 +29,11 @@ public boolean isTargetTypeSupported(TargetType targetType) { return true; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + throw new UnsupportedOperationException("Cannot update NullInferenceConfig objects"); + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.CURRENT; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java index 74ca76779d4b2..0e27fc00b9b70 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfig.java @@ -120,6 +120,22 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof PassThroughConfigUpdate configUpdate) { + return new PassThroughConfig( + vocabularyConfig, + (configUpdate.getTokenizationUpdate() == null) ? tokenization : configUpdate.getTokenizationUpdate().apply(tokenization), + update.getResultsField() == null ? 
resultsField : update.getResultsField() + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new PassThroughConfig(this.vocabularyConfig, updatedTokenization, this.resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java index 874f82dc019ca..1a7832a70cfdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java @@ -96,30 +96,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { - return originalConfig; - } - - if (originalConfig instanceof PassThroughConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - PassThroughConfig passThroughConfig = (PassThroughConfig) originalConfig; - return new PassThroughConfig( - passThroughConfig.getVocabularyConfig(), - (tokenizationUpdate == null) - ? passThroughConfig.getTokenization() - : tokenizationUpdate.apply(passThroughConfig.getTokenization()), - resultsField == null ? originalConfig.getResultsField() : resultsField - ); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof PassThroughConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java index 7572d757f2b5f..014cdb1dd891f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfig.java @@ -188,6 +188,32 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof QuestionAnsweringConfigUpdate configUpdate) { + return new QuestionAnsweringConfig( + configUpdate.getQuestion(), + Optional.ofNullable(configUpdate.getNumTopClasses()).orElse(numTopClasses), + Optional.ofNullable(configUpdate.getMaxAnswerLength()).orElse(maxAnswerLength), + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new QuestionAnsweringConfig( + question, + numTopClasses, + maxAnswerLength, + vocabularyConfig, + updatedTokenization, + resultsField + ); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_3_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java index 40657544a14d5..df4cb565731ed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdate.java @@ -22,7 +22,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; @@ -126,37 +125,6 @@ public String getWriteableName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof QuestionAnsweringConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - QuestionAnsweringConfig questionAnsweringConfig = (QuestionAnsweringConfig) originalConfig; - return new QuestionAnsweringConfig( - question, - Optional.ofNullable(numTopClasses).orElse(questionAnsweringConfig.getNumTopClasses()), - Optional.ofNullable(maxAnswerLength).orElse(questionAnsweringConfig.getMaxAnswerLength()), - questionAnsweringConfig.getVocabularyConfig(), - tokenizationUpdate == null - ? 
questionAnsweringConfig.getTokenization() - : tokenizationUpdate.apply(questionAnsweringConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(questionAnsweringConfig.getResultsField()) - ); - } - - boolean isNoop(QuestionAnsweringConfig originalConfig) { - return (numTopClasses == null || numTopClasses.equals(originalConfig.getNumTopClasses())) - && (maxAnswerLength == null || maxAnswerLength.equals(originalConfig.getMaxAnswerLength())) - && (resultsField == null || resultsField.equals(originalConfig.getResultsField())) - && (question == null || question.equals(originalConfig.getQuestion())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof QuestionAnsweringConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java index 8ea53b2725523..337a1ac693128 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfig.java @@ -135,6 +135,24 @@ public boolean isTargetTypeSupported(TargetType targetType) { return TargetType.REGRESSION.equals(targetType); } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof RegressionConfigUpdate configUpdate) { + RegressionConfig.Builder builder = new RegressionConfig.Builder(this); + if (configUpdate.getResultsField() != null) { + builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.getNumTopFeatureImportanceValues() != null) { + builder.setNumTopFeatureImportanceValues(configUpdate.getNumTopFeatureImportanceValues()); + } + return builder.build(); + } else if (update instanceof ResultsFieldUpdate resultsFieldUpdate) { + return new RegressionConfig.Builder(this).setResultsField(resultsFieldUpdate.getResultsField()).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return requestingImportance() ? 
MlConfigVersion.V_7_7_0 : MIN_SUPPORTED_VERSION; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index a678806181ef8..dc1a7bdeef104 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -144,41 +144,11 @@ public int hashCode() { return Objects.hash(resultsField, numTopFeatureImportanceValues); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof RegressionConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - RegressionConfig regressionConfig = (RegressionConfig) originalConfig; - if (isNoop(regressionConfig)) { - return originalConfig; - } - RegressionConfig.Builder builder = new RegressionConfig.Builder(regressionConfig); - if (resultsField != null) { - builder.setResultsField(resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.setNumTopFeatureImportanceValues(numTopFeatureImportanceValues); - } - return builder.build(); - } - @Override public boolean isSupported(InferenceConfig inferenceConfig) { return inferenceConfig instanceof RegressionConfig; } - boolean isNoop(RegressionConfig originalConfig) { - return (resultsField == null || originalConfig.getResultsField().equals(resultsField)) - && (numTopFeatureImportanceValues == null - || originalConfig.getNumTopFeatureImportanceValues() == numTopFeatureImportanceValues); - } - public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private Integer numTopFeatureImportanceValues; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java index fe1fb9844610d..34d3b1c1e38f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; @@ -34,22 +33,6 @@ public ResultsFieldUpdate(StreamInput in) throws IOException { resultsField = in.readString(); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof ClassificationConfig) { - ClassificationConfigUpdate update = new ClassificationConfigUpdate(null, resultsField, null, null, null); - return update.apply(originalConfig); - } else if (originalConfig instanceof RegressionConfig) { - RegressionConfigUpdate update = new RegressionConfigUpdate(resultsField, null); - return update.apply(originalConfig); - } else { - throw ExceptionsHelper.badRequestException( - "Inference config of unknown type [{}] can not be updated", - 
originalConfig.getName() - ); - } - } - @Override public boolean isSupported(InferenceConfig config) { return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java index febb7cb40ec82..bbb35ad70b90d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenization.java @@ -85,6 +85,18 @@ public RobertaTokenization(StreamInput in) throws IOException { this.addPrefixSpace = in.readBoolean(); } + @Override + Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan) { + return new RobertaTokenization( + this.doLowerCase, + this.withSpecialTokens, + updatedMaxSeqLength, + Truncate.NONE, + updatedSpan, + this.addPrefixSpace + ); + } + public boolean isAddPrefixSpace() { return addPrefixSpace; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java index ab50f26636fc4..153879d4f61b4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfig.java @@ -133,6 +133,40 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextClassificationConfigUpdate configUpdate) { + TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(this); + if (configUpdate.getNumTopClasses() != null) { + builder.setNumTopClasses(configUpdate.getNumTopClasses()); + } + if (configUpdate.getClassificationLabels() != null) { + if (classificationLabels.size() != configUpdate.getClassificationLabels().size()) { + throw ExceptionsHelper.badRequestException( + "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]", + CLASSIFICATION_LABELS, + classificationLabels.size(), + configUpdate.getClassificationLabels().size() + ); + } + builder.setClassificationLabels(configUpdate.getClassificationLabels()); + } + if (configUpdate.getResultsField() != null) { + builder.setResultsField(configUpdate.getResultsField()); + } + if (configUpdate.tokenizationUpdate != null) { + builder.setTokenization(configUpdate.tokenizationUpdate.apply(tokenization)); + } + + return builder.build(); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextClassificationConfig.Builder(this).setTokenization(updatedTokenization).build(); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java 
index 460a3a685d534..5379e3eeb17f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java @@ -111,54 +111,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(resultsField); } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextClassificationConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextClassificationConfig classificationConfig = (TextClassificationConfig) originalConfig; - if (isNoop(classificationConfig)) { - return originalConfig; - } - - TextClassificationConfig.Builder builder = new TextClassificationConfig.Builder(classificationConfig); - if (numTopClasses != null) { - builder.setNumTopClasses(numTopClasses); - } - if (classificationLabels != null) { - if (classificationLabels.size() != classificationConfig.getClassificationLabels().size()) { - throw ExceptionsHelper.badRequestException( - "The number of [{}] the model is defined with [{}] does not match the number in the update [{}]", - CLASSIFICATION_LABELS, - classificationConfig.getClassificationLabels().size(), - classificationLabels.size() - ); - } - builder.setClassificationLabels(classificationLabels); - } - if (resultsField != null) { - builder.setResultsField(resultsField); - } - - if (tokenizationUpdate != null) { - builder.setTokenization(tokenizationUpdate.apply(classificationConfig.getTokenization())); - } - - return builder.build(); - } - - boolean isNoop(TextClassificationConfig originalConfig) { - return (this.numTopClasses == null || this.numTopClasses == originalConfig.getNumTopClasses()) - && (this.classificationLabels == null) - && (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof TextClassificationConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java index 518b9eb62d793..d043c17535636 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfig.java @@ -145,6 +145,23 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextEmbeddingConfigUpdate configUpdate) { + return new TextEmbeddingConfig( + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + configUpdate.getResultsField() == null ? 
resultsField : configUpdate.getResultsField(), + embeddingSize + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextEmbeddingConfig(vocabularyConfig, updatedTokenization, resultsField, embeddingSize); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index 6acd2d209a875..e89281a59f7d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -104,29 +104,6 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_0_0; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { - return originalConfig; - } - - if (originalConfig instanceof TextEmbeddingConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextEmbeddingConfig embeddingConfig = (TextEmbeddingConfig) originalConfig; - return new TextEmbeddingConfig( - embeddingConfig.getVocabularyConfig(), - tokenizationUpdate == null ? embeddingConfig.getTokenization() : tokenizationUpdate.apply(embeddingConfig.getTokenization()), - resultsField == null ? embeddingConfig.getResultsField() : resultsField, - embeddingConfig.getEmbeddingSize() - ); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof TextEmbeddingConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java index d8315bec14153..c4d78c9faf219 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java @@ -121,6 +121,22 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextExpansionConfigUpdate configUpdate) { + return new TextExpansionConfig( + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextExpansionConfig(vocabularyConfig, updatedTokenization, resultsField); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public boolean isAllocateOnly() { return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java index 181cadbaf7168..3ba5c91502480 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigUpdate.java @@ -21,7 +21,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -100,33 +99,6 @@ public String getName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextExpansionConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a request of type [{}]", - originalConfig.getName(), - getName() - ); - } - TextExpansionConfig textExpansionConfig = (TextExpansionConfig) originalConfig; - if (isNoop(textExpansionConfig)) { - return textExpansionConfig; - } - - return new TextExpansionConfig( - textExpansionConfig.getVocabularyConfig(), - (tokenizationUpdate == null) - ? textExpansionConfig.getTokenization() - : tokenizationUpdate.apply(textExpansionConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(textExpansionConfig.getResultsField()) - ); - } - - boolean isNoop(TextExpansionConfig originalConfig) { - return (this.resultsField == null || this.resultsField.equals(originalConfig.getResultsField())) && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof TextExpansionConfig; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java index 5511df03e6f36..bbd819891e217 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfig.java @@ -149,6 +149,24 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof TextSimilarityConfigUpdate configUpdate) { + return new TextSimilarityConfig( + configUpdate.getText(), + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? 
tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField), + Optional.ofNullable(configUpdate.getSpanScoreFunction()).orElse(spanScoreFunction) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new TextSimilarityConfig(text, vocabularyConfig, updatedTokenization, resultsField, spanScoreFunction); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_5_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java index c7afacc07b944..2ddbf8bd63f49 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java @@ -110,31 +110,13 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th return builder; } - @Override - public String getWriteableName() { - return NAME; + public TextSimilarityConfig.SpanScoreFunction getSpanScoreFunction() { + return spanScoreFunction; } @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof TextSimilarityConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - TextSimilarityConfig textSimilarityConfig = (TextSimilarityConfig) originalConfig; - return new TextSimilarityConfig( - text, - textSimilarityConfig.getVocabularyConfig(), - tokenizationUpdate == null - ? 
textSimilarityConfig.getTokenization() - : tokenizationUpdate.apply(textSimilarityConfig.getTokenization()), - Optional.ofNullable(resultsField).orElse(textSimilarityConfig.getResultsField()), - Optional.ofNullable(spanScoreFunction).orElse(textSimilarityConfig.getSpanScoreFunction()) - ); + public String getWriteableName() { + return NAME; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java index ef437e0201510..4f301b48cdacc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/Tokenization.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -49,6 +52,19 @@ public String toString() { } } + record SpanSettings(@Nullable Integer maxSequenceLength, int span) implements Writeable { + + SpanSettings(StreamInput in) throws IOException { + this(in.readOptionalVInt(), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(maxSequenceLength); + out.writeVInt(span); + } + }; + // TODO add global params like never_split, bos_token, eos_token, mask_token, tokenize_chinese_chars, strip_accents, etc. public static final ParseField DO_LOWER_CASE = new ParseField("do_lower_case"); public static final ParseField WITH_SPECIAL_TOKENS = new ParseField("with_special_tokens"); @@ -104,20 +120,8 @@ public static BertTokenization createDefault() { + "] to indicate no windowing should occur" ); } - if (this.span > this.maxSequenceLength) { - throw new IllegalArgumentException( - "[" - + SPAN.getPreferredName() - + "] provided [" - + this.span - + "] must not be greater than [" - + MAX_SEQUENCE_LENGTH.getPreferredName() - + "] provided [" - + this.maxSequenceLength - + "]" - ); - } - validateSpanAndTruncate(truncate, span); + validateSpanAndMaxSequenceLength(this.maxSequenceLength, this.span); + validateSpanAndTruncate(this.truncate, this.span); } public Tokenization(StreamInput in) throws IOException { @@ -132,6 +136,35 @@ public Tokenization(StreamInput in) throws IOException { } } + /** + * Return a copy of this with the tokenizer span settings updated + * @param update The settings to update + * @return An updated Tokenization + */ + public Tokenization updateSpanSettings(SpanSettings update) { + int maxLength = update.maxSequenceLength() == null ? 
this.maxSequenceLength : update.maxSequenceLength(); + validateSpanAndMaxSequenceLength(maxLength, span); + if (update.maxSequenceLength() != null && update.maxSequenceLength() > this.maxSequenceLength) { + throw new ElasticsearchStatusException( + "Updated max sequence length [{}] cannot be greater " + "than the model's max sequence length [{}]", + RestStatus.BAD_REQUEST, + update.maxSequenceLength(), + this.maxSequenceLength + ); + } + + return buildWindowingTokenization(maxLength, update.span()); + } + + /** + * Build a copy of this with {@code Truncate == NONE} using + * the specified max sequence length and span + * @param updatedMaxSeqLength Max sequence length + * @param updatedSpan Span + * @return A new Tokenization object + */ + abstract Tokenization buildWindowingTokenization(int updatedMaxSeqLength, int updatedSpan); + @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(doLowerCase); @@ -160,6 +193,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static void validateSpanAndMaxSequenceLength(int maxSequenceLength, int span) { + if (span > maxSequenceLength) { + throw new IllegalArgumentException( + "[" + + SPAN.getPreferredName() + + "] provided [" + + span + + "] must not be greater than [" + + MAX_SEQUENCE_LENGTH.getPreferredName() + + "] provided [" + + maxSequenceLength + + "]" + ); + } + } + public static void validateSpanAndTruncate(@Nullable Truncate truncate, @Nullable Integer span) { if ((span != null && span != UNSET_SPAN_VALUE) && (truncate != null && truncate.isInCompatibleWithSpan())) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java new file mode 100644 index 0000000000000..2414fe5776438 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdate.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference.trainedmodel; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * An update that sets the tokenization truncate option to NONE + * and updates the span and max sequence length settings. 
+ */ +public class TokenizationConfigUpdate implements InferenceConfigUpdate { + + public static final String NAME = "tokenization_update"; + + private final Tokenization.SpanSettings spanSettings; + + public TokenizationConfigUpdate(Tokenization.SpanSettings spanSettings) { + this.spanSettings = spanSettings; + } + + public TokenizationConfigUpdate(StreamInput in) throws IOException { + this.spanSettings = new Tokenization.SpanSettings(in); + } + + public Tokenization.SpanSettings getSpanSettings() { + return spanSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + spanSettings.writeTo(out); + } + + @Override + public boolean isSupported(InferenceConfig config) { + return true; + } + + @Override + public String getResultsField() { + return null; + } + + @Override + public Builder<? extends Builder<?, ?>, ? extends InferenceConfigUpdate> newBuilder() { + throw new UnsupportedOperationException("Tokenization update is not supported as a builder"); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenizationConfigUpdate that = (TokenizationConfigUpdate) o; + return Objects.equals(spanSettings, that.spanSettings); + } + + @Override + public int hashCode() { + return Objects.hash(spanSettings); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java index 43016e58420ad..648e52538040d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/XLMRobertaTokenization.java @@ -72,6 +72,11 @@ public XLMRobertaTokenization(StreamInput in) throws IOException { super(in); } + @Override + protected Tokenization buildWindowingTokenization(int maxSeqLength, int span) { + return new XLMRobertaTokenization(withSpecialTokens, maxSeqLength, Truncate.NONE, span); + } + @Override public String getWriteableName() { return NAME; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java index ba4c130b987d2..4c669f289016a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfig.java @@ -198,6 +198,43 @@ public boolean isTargetTypeSupported(TargetType targetType) { return false; } + @Override + public InferenceConfig apply(InferenceConfigUpdate update) { + if (update instanceof ZeroShotClassificationConfigUpdate configUpdate) { + if ((configUpdate.getLabels() == null || configUpdate.getLabels().isEmpty()) + && (this.labels == null || this.labels.isEmpty())) { + throw ExceptionsHelper.badRequestException( + "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]", + LABELS.getPreferredName(), + LABELS.getPreferredName() + ); + } + + return new 
ZeroShotClassificationConfig( + classificationLabels, + vocabularyConfig, + configUpdate.tokenizationUpdate == null ? tokenization : configUpdate.tokenizationUpdate.apply(tokenization), + hypothesisTemplate, + Optional.ofNullable(configUpdate.getMultiLabel()).orElse(isMultiLabel), + Optional.ofNullable(configUpdate.getLabels()).orElse(labels), + Optional.ofNullable(configUpdate.getResultsField()).orElse(resultsField) + ); + } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) { + var updatedTokenization = getTokenization().updateSpanSettings(tokenizationUpdate.getSpanSettings()); + return new ZeroShotClassificationConfig( + classificationLabels, + vocabularyConfig, + updatedTokenization, + hypothesisTemplate, + isMultiLabel, + labels, + resultsField + ); + } else { + throw incompatibleUpdateException(update.getName()); + } + } + @Override public MlConfigVersion getMinimalSupportedMlConfigVersion() { return MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java index 47fd75ed6ff42..8f03d5e3d01cf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; @@ -121,45 +120,6 @@ public String getWriteableName() { return NAME; } - @Override - public InferenceConfig apply(InferenceConfig originalConfig) { - if (originalConfig instanceof ZeroShotClassificationConfig == false) { - throw ExceptionsHelper.badRequestException( - "Inference config of type [{}] can not be updated with a inference request of type [{}]", - originalConfig.getName(), - getName() - ); - } - - ZeroShotClassificationConfig zeroShotConfig = (ZeroShotClassificationConfig) originalConfig; - if ((labels == null || labels.isEmpty()) && (zeroShotConfig.getLabels() == null || zeroShotConfig.getLabels().isEmpty())) { - throw ExceptionsHelper.badRequestException( - "stored configuration has no [{}] defined, supplied inference_config update must supply [{}]", - LABELS.getPreferredName(), - LABELS.getPreferredName() - ); - } - if (isNoop(zeroShotConfig)) { - return originalConfig; - } - return new ZeroShotClassificationConfig( - zeroShotConfig.getClassificationLabels(), - zeroShotConfig.getVocabularyConfig(), - tokenizationUpdate == null ? 
zeroShotConfig.getTokenization() : tokenizationUpdate.apply(zeroShotConfig.getTokenization()), - zeroShotConfig.getHypothesisTemplate(), - Optional.ofNullable(isMultiLabel).orElse(zeroShotConfig.isMultiLabel()), - Optional.ofNullable(labels).orElse(zeroShotConfig.getLabels().orElse(null)), - Optional.ofNullable(resultsField).orElse(zeroShotConfig.getResultsField()) - ); - } - - boolean isNoop(ZeroShotClassificationConfig originalConfig) { - return (labels == null || labels.equals(originalConfig.getLabels().orElse(null))) - && (isMultiLabel == null || isMultiLabel.equals(originalConfig.isMultiLabel())) - && (resultsField == null || resultsField.equals(originalConfig.getResultsField())) - && super.isNoop(); - } - @Override public boolean isSupported(InferenceConfig config) { return config instanceof ZeroShotClassificationConfig; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java index 79c069afbd4ab..9253469ecc49d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java @@ -63,6 +63,13 @@ protected BertJapaneseTokenization mutateInstanceForVersion(BertJapaneseTokeniza return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new BertJapaneseTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static BertJapaneseTokenization createRandom() { return new BertJapaneseTokenization( randomBoolean() ? null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java index a00ebec79a862..b9cda9a2068ea 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertTokenizationTests.java @@ -63,6 +63,13 @@ protected BertTokenization mutateInstanceForVersion(BertTokenization instance, T return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new BertTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static BertTokenization createRandom() { return new BertTokenization( randomBoolean() ? 
null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java index 1d52deaafa719..620036a040368 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdateTests.java @@ -59,11 +59,11 @@ public void testFromMapWithUnknownField() { public void testApply() { ClassificationConfig originalConfig = randomClassificationConfig(); - assertThat(originalConfig, equalTo(ClassificationConfigUpdate.EMPTY_PARAMS.apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(ClassificationConfigUpdate.EMPTY_PARAMS))); assertThat( new ClassificationConfig.Builder(originalConfig).setNumTopClasses(5).build(), - equalTo(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ClassificationConfigUpdate.Builder().setNumTopClasses(5).build())) ); assertThat( new ClassificationConfig.Builder().setNumTopClasses(5) @@ -73,13 +73,14 @@ public void testApply() { .setTopClassesResultsField("bar") .build(), equalTo( - new ClassificationConfigUpdate.Builder().setNumTopClasses(5) - .setNumTopFeatureImportanceValues(1) - .setPredictionFieldType(PredictionFieldType.BOOLEAN) - .setResultsField("foo") - .setTopClassesResultsField("bar") - .build() - .apply(originalConfig) + originalConfig.apply( + new ClassificationConfigUpdate.Builder().setNumTopClasses(5) + .setNumTopFeatureImportanceValues(1) + .setPredictionFieldType(PredictionFieldType.BOOLEAN) + .setResultsField("foo") + .setTopClassesResultsField("bar") + .build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java index 40eb9a4afd35f..385f5b1ddbf83 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdateTests.java @@ -62,39 +62,19 @@ FillMaskConfigUpdate fromMap(Map map) { return FillMaskConfigUpdate.fromMap(map); } - public void testIsNoop() { - assertTrue(new FillMaskConfigUpdate.Builder().build().isNoop(FillMaskConfigTests.createRandom())); - - assertFalse( - new FillMaskConfigUpdate.Builder().setResultsField("foo") - .build() - .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build()) - ); - - assertFalse( - new FillMaskConfigUpdate.Builder().setTokenizationUpdate(new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null)) - .build() - .isNoop(new FillMaskConfig.Builder().setResultsField("bar").build()) - ); - - assertTrue( - new FillMaskConfigUpdate.Builder().setNumTopClasses(3).build().isNoop(new FillMaskConfig.Builder().setNumTopClasses(3).build()) - ); - } - public void testApply() { FillMaskConfig originalConfig = FillMaskConfigTests.createRandom(); - assertThat(originalConfig, equalTo(new FillMaskConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new 
FillMaskConfigUpdate.Builder().build()))); assertThat( new FillMaskConfig.Builder(originalConfig).setResultsField("ml-results").build(), - equalTo(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new FillMaskConfigUpdate.Builder().setResultsField("ml-results").build())) ); assertThat( new FillMaskConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 1).build(), equalTo( - new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build().apply(originalConfig) + originalConfig.apply(new FillMaskConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 1).build()) ) ); @@ -103,9 +83,11 @@ public void testApply() { assertThat( new FillMaskConfig.Builder(originalConfig).setTokenization(tokenization).build(), equalTo( - new FillMaskConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new FillMaskConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java index eb2afa501a4cc..72ba9fa5ba540 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class NerConfigUpdateTests extends AbstractNlpConfigUpdateTestCase<NerConfigUpdate> { @@ -61,7 +60,7 @@ NerConfigUpdate fromMap(Map<String, Object> map) { public void testApply() { NerConfig originalConfig = NerConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new NerConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new NerConfigUpdate.Builder().build()))); assertThat( new NerConfig( originalConfig.getVocabularyConfig(), originalConfig.getTokenization(), originalConfig.getClassificationLabels(), "ml-results" ), - equalTo(new NerConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new NerConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -83,9 +82,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new NerConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new NerConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java index 9cbf73dfe4809..caec28a93e5a3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class PassThroughConfigUpdateTests extends AbstractNlpConfigUpdateTestCase<PassThroughConfigUpdate> { @@ -61,11 +60,11 @@ PassThroughConfigUpdate fromMap(Map<String, Object> map) { public void testApply() { PassThroughConfig originalConfig = PassThroughConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new PassThroughConfigUpdate.Builder().build().apply(originalConfig))); + assertEquals(originalConfig, originalConfig.apply(new PassThroughConfigUpdate.Builder().build())); assertThat( new PassThroughConfig(originalConfig.getVocabularyConfig(), originalConfig.getTokenization(), "ml-results"), - equalTo(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new PassThroughConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -73,9 +72,11 @@ public void testApply() { assertThat( new PassThroughConfig(originalConfig.getVocabularyConfig(), tokenization, originalConfig.getResultsField()), equalTo( - new PassThroughConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new PassThroughConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java index 46f11e7c5f793..e787b770b5da5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/QuestionAnsweringConfigUpdateTests.java @@ -122,11 +122,12 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setNumTopClasses(4) - .setMaxAnswerLength(40) - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") + .setNumTopClasses(4) + .setMaxAnswerLength(40) + .build() + ) ) ); assertThat( @@ -139,10 +140,9 @@ public void testApply() { "updated-field" ), equalTo( - new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setResultsField("updated-field") - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my 
mother?").setResultsField("updated-field").build() + ) ) ); @@ -158,10 +158,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") - .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) - .build() - .apply(originalConfig) + originalConfig.apply( + new QuestionAnsweringConfigUpdate.Builder().setQuestion("Are you my mother?") + .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) + .build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java index 4c60ca7f885c4..35d2cb7fda16f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdateTests.java @@ -53,19 +53,18 @@ public void testFromMapWithUnknownField() { public void testApply() { RegressionConfig originalConfig = randomRegressionConfig(); - assertThat(originalConfig, equalTo(RegressionConfigUpdate.EMPTY_PARAMS.apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(RegressionConfigUpdate.EMPTY_PARAMS))); assertThat( new RegressionConfig.Builder(originalConfig).setNumTopFeatureImportanceValues(5).build(), - equalTo(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build().apply(originalConfig)) + equalTo(originalConfig.apply(new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(5).build())) ); assertThat( new RegressionConfig.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build(), equalTo( - new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1) - .setResultsField("foo") - .build() - .apply(originalConfig) + originalConfig.apply( + new RegressionConfigUpdate.Builder().setNumTopFeatureImportanceValues(1).setResultsField("foo").build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java index 4237458d01f63..9accabb788669 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdateTests.java @@ -44,7 +44,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() { ClassificationConfig config = ClassificationConfigTests.randomClassificationConfig(); String newResultsField = config.getResultsField() + "foobar"; ResultsFieldUpdate update = new ResultsFieldUpdate(newResultsField); - InferenceConfig applied = update.apply(config); + InferenceConfig applied = config.apply(update); assertThat(applied, instanceOf(ClassificationConfig.class)); ClassificationConfig appliedConfig = (ClassificationConfig) applied; @@ -55,7 +55,7 @@ public void testApply_OnlyTheResultsFieldIsChanged() { RegressionConfig config = RegressionConfigTests.randomRegressionConfig(); String newResultsField = config.getResultsField() + "foobar"; ResultsFieldUpdate update = new 
ResultsFieldUpdate(newResultsField); - InferenceConfig applied = update.apply(config); + InferenceConfig applied = config.apply(update); assertThat(applied, instanceOf(RegressionConfig.class)); RegressionConfig appliedConfig = (RegressionConfig) applied; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java index 4f2c167015816..8cedd20432a6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RobertaTokenizationTests.java @@ -63,6 +63,13 @@ protected RobertaTokenization mutateInstanceForVersion(RobertaTokenization insta return mutateForVersion(instance, version); } + public void testsBuildUpdatedTokenization() { + var update = new RobertaTokenization(true, true, 100, Tokenization.Truncate.FIRST, -1).buildWindowingTokenization(50, 20); + assertEquals(Tokenization.Truncate.NONE, update.getTruncate()); + assertEquals(50, update.maxSequenceLength()); + assertEquals(20, update.getSpan()); + } + public static RobertaTokenization createRandom() { return new RobertaTokenization( randomBoolean() ? null : randomBoolean(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java index 72d963da8f0be..25b4299b41e8e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdateTests.java @@ -79,38 +79,6 @@ TextClassificationConfigUpdate fromMap(Map map) { return TextClassificationConfigUpdate.fromMap(map); } - public void testIsNoop() { - assertTrue(new TextClassificationConfigUpdate.Builder().build().isNoop(TextClassificationConfigTests.createRandom())); - - assertFalse( - new TextClassificationConfigUpdate.Builder().setResultsField("foo") - .build() - .isNoop( - new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b")) - .setNumTopClasses(-1) - .setResultsField("bar") - .build() - ) - ); - - assertTrue( - new TextClassificationConfigUpdate.Builder().setNumTopClasses(3) - .build() - .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("a", "b")).setNumTopClasses(3).build()) - ); - assertFalse( - new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("a", "b")) - .build() - .isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build()) - ); - assertFalse( - new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( - new BertTokenizationUpdate(Tokenization.Truncate.SECOND, null) - ).build().isNoop(new TextClassificationConfig.Builder().setClassificationLabels(List.of("c", "d")).build()) - ); - - } - public void testApply() { TextClassificationConfig originalConfig = new TextClassificationConfig( VocabularyConfigTests.createRandom(), @@ -120,24 +88,24 @@ public void testApply() { "foo-results" ); - assertThat(originalConfig, equalTo(new TextClassificationConfigUpdate.Builder().build().apply(originalConfig))); + 
assertThat(originalConfig, equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().build()))); assertThat( new TextClassificationConfig.Builder(originalConfig).setClassificationLabels(List.of("foo", "bar")).build(), equalTo( - new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build().apply(originalConfig) + originalConfig.apply(new TextClassificationConfigUpdate.Builder().setClassificationLabels(List.of("foo", "bar")).build()) ) ); assertThat( new TextClassificationConfig.Builder(originalConfig).setResultsField("ml-results").build(), - equalTo(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextClassificationConfigUpdate.Builder().setResultsField("ml-results").build())) ); assertThat( new TextClassificationConfig.Builder(originalConfig).setNumTopClasses(originalConfig.getNumTopClasses() + 2).build(), equalTo( - new TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2) - .build() - .apply(originalConfig) + originalConfig.apply( + new TextClassificationConfigUpdate.Builder().setNumTopClasses(originalConfig.getNumTopClasses() + 2).build() + ) ) ); @@ -146,9 +114,11 @@ public void testApply() { assertThat( new TextClassificationConfig.Builder(originalConfig).setTokenization(tokenization).build(), equalTo( - new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new TextClassificationConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } @@ -161,7 +131,7 @@ public void testApplyWithInvalidLabels() { var update = new TextClassificationConfigUpdate.Builder().setClassificationLabels(newLabels).build(); - ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> update.apply(originalConfig)); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> originalConfig.apply(update)); assertThat( e.getMessage(), containsString( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java index 06abb12bdb0a2..ecff9c1010c46 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdateTests.java @@ -20,7 +20,6 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.cloneWithNewTruncation; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigTestScaffolding.createTokenizationUpdate; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class TextEmbeddingConfigUpdateTests extends AbstractNlpConfigUpdateTestCase<TextEmbeddingConfigUpdate> { @@ -61,7 +60,7 @@ TextEmbeddingConfigUpdate fromMap(Map<String, Object> map) { public void testApply() { TextEmbeddingConfig originalConfig = TextEmbeddingConfigTests.createRandom(); - assertThat(originalConfig, sameInstance(new TextEmbeddingConfigUpdate.Builder().build().apply(originalConfig))); + 
assertThat(originalConfig, equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().build()))); assertThat( new TextEmbeddingConfig( @@ -70,7 +69,7 @@ public void testApply() { "ml-results", originalConfig.getEmbeddingSize() ), - equalTo(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextEmbeddingConfigUpdate.Builder().setResultsField("ml-results").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -83,9 +82,11 @@ public void testApply() { originalConfig.getEmbeddingSize() ), equalTo( - new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new TextEmbeddingConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java index b8b5f50d15bae..e5061a743c672 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java @@ -127,7 +127,7 @@ public void testApply() { originalConfig.getResultsField(), originalConfig.getSpanScoreFunction() ), - equalTo(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build().apply(originalConfig)) + equalTo(originalConfig.apply(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build())) ); assertThat( new TextSimilarityConfig( @@ -138,10 +138,9 @@ public void testApply() { originalConfig.getSpanScoreFunction() ), equalTo( - new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") - .setResultsField("updated-field") - .build() - .apply(originalConfig) + originalConfig.apply( + new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").setResultsField("updated-field").build() + ) ) ); @@ -156,10 +155,11 @@ public void testApply() { originalConfig.getSpanScoreFunction() ), equalTo( - new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") - .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) - .build() - .apply(originalConfig) + originalConfig.apply( + new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?") + .setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null)) + .build() + ) ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java new file mode 100644 index 0000000000000..431dcf6c8c769 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TokenizationConfigUpdateTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.inference.trainedmodel; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; + +public class TokenizationConfigUpdateTests extends AbstractWireSerializingTestCase<TokenizationConfigUpdate> { + @Override + protected Writeable.Reader<TokenizationConfigUpdate> instanceReader() { + return TokenizationConfigUpdate::new; + } + + @Override + protected TokenizationConfigUpdate createTestInstance() { + Integer maxSequenceLength = randomBoolean() ? null : randomIntBetween(32, 64); + int span = randomIntBetween(8, 16); + return new TokenizationConfigUpdate(new Tokenization.SpanSettings(maxSequenceLength, span)); + } + + @Override + protected TokenizationConfigUpdate mutateInstance(TokenizationConfigUpdate instance) throws IOException { + return null; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java index 09c8eed048d96..ed034bb8518d9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdateTests.java @@ -108,7 +108,7 @@ public void testApply() { randomBoolean() ? null : randomAlphaOfLength(8) ); - assertThat(originalConfig, equalTo(new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig))); + assertThat(originalConfig, equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build()))); assertThat( new ZeroShotClassificationConfig( @@ -120,7 +120,7 @@ public void testApply() { List.of("foo", "bar"), originalConfig.getResultsField() ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("foo", "bar")).build())) ); assertThat( new ZeroShotClassificationConfig( @@ -132,7 +132,7 @@ public void testApply() { originalConfig.getLabels().orElse(null), originalConfig.getResultsField() ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setMultiLabel(true).build())) ); assertThat( new ZeroShotClassificationConfig( @@ -144,7 +144,7 @@ public void testApply() { originalConfig.getLabels().orElse(null), "updated-field" ), - equalTo(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build().apply(originalConfig)) + equalTo(originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().setResultsField("updated-field").build())) ); Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values()); @@ -160,9 +160,11 @@ public void testApply() { originalConfig.getResultsField() ), equalTo( - new ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate( - createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) - ).build().apply(originalConfig) + originalConfig.apply( + new 
ZeroShotClassificationConfigUpdate.Builder().setTokenizationUpdate( + createTokenizationUpdate(originalConfig.getTokenization(), truncate, null) + ).build() + ) ) ); } @@ -178,41 +180,13 @@ public void testApplyWithEmptyLabelsInConfigAndUpdate() { null ); - Exception ex = expectThrows(Exception.class, () -> new ZeroShotClassificationConfigUpdate.Builder().build().apply(originalConfig)); + Exception ex = expectThrows(Exception.class, () -> originalConfig.apply(new ZeroShotClassificationConfigUpdate.Builder().build())); assertThat( ex.getMessage(), containsString("stored configuration has no [labels] defined, supplied inference_config update must supply [labels]") ); } - public void testIsNoop() { - assertTrue(new ZeroShotClassificationConfigUpdate.Builder().build().isNoop(ZeroShotClassificationConfigTests.createRandom())); - - var originalConfig = new ZeroShotClassificationConfig( - List.of("contradiction", "neutral", "entailment"), - randomBoolean() ? null : VocabularyConfigTests.createRandom(), - randomBoolean() ? null : BertTokenizationTests.createRandom(), - randomAlphaOfLength(10), - randomBoolean(), - null, - randomBoolean() ? null : randomAlphaOfLength(8) - ); - - var update = new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("glad", "sad", "mad")).build(); - assertFalse(update.isNoop(originalConfig)); - - originalConfig = new ZeroShotClassificationConfig( - List.of("contradiction", "neutral", "entailment"), - randomBoolean() ? null : VocabularyConfigTests.createRandom(), - randomBoolean() ? null : BertTokenizationTests.createRandom(), - randomAlphaOfLength(10), - randomBoolean(), - List.of("glad", "sad", "mad"), - randomBoolean() ? null : randomAlphaOfLength(8) - ); - assertTrue(update.isNoop(originalConfig)); - } - public static ZeroShotClassificationConfigUpdate createRandom() { return randomUpdate(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java index cd7ed9e3eb55a..851dd8744d03e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/TrainedModelDeploymentTask.java @@ -171,16 +171,8 @@ public void infer( ); return; } - trainedModelAssignmentNodeService.infer( - this, - update.apply(inferenceConfigHolder.get()), - input, - skipQueue, - timeout, - prefixType, - parentActionTask, - listener - ); + var updatedConfig = update.isEmpty() ? 
inferenceConfigHolder.get() : inferenceConfigHolder.get().apply(update); + trainedModelAssignmentNodeService.infer(this, updatedConfig, input, skipQueue, timeout, prefixType, parentActionTask, listener); } public Optional<ModelStats> modelStats() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java index ffd70849d8f1c..fe0bd18b2147a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/LocalModel.java @@ -175,7 +175,10 @@ public void infer(Map<String, Object> fields, InferenceConfigUpdate update, Acti listener.onResponse(new WarningInferenceResults(Messages.getMessage(INFERENCE_WARNING_ALL_FIELDS_MISSING, modelId))); return; } - InferenceResults inferenceResults = trainedModelDefinition.infer(flattenedFields, update.apply(inferenceConfig)); + InferenceResults inferenceResults = trainedModelDefinition.infer( + flattenedFields, + update.isEmpty() ? inferenceConfig : inferenceConfig.apply(update) + ); if (shouldPersistStats) { persistStats(false); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java index ce9d7a9d3640b..d8f1a1fd7433d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java @@ -47,7 +47,7 @@ public void testBuildRequest() throws IOException { ZeroShotClassificationProcessor processor = new ZeroShotClassificationProcessor(tokenizer, config); NlpTask.Request request = processor.getRequestBuilder( - (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config) + (NlpConfig) config.apply(new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build()) ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE, -1); Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); From 971cfb9317fc64ca04fb75d0e463419166f6ab84 Mon Sep 17 00:00:00 2001 From: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:00:19 +0100 Subject: [PATCH 03/35] [ML] Refactor assignment planner code (#104260) This PR simplifies the code in a few places. In other places, where I had left TODO comments, the possible simplification would lead to undesired consequences, so I removed the TODO comments referencing issue #101612. 
Closes #101612 --- .../TrainedModelAssignmentRebalancer.java | 10 +++------- .../planning/AbstractPreserveAllocations.java | 3 +-- .../assignment/planning/AssignmentPlan.java | 14 +++++++------- .../planning/RandomizedAssignmentRounding.java | 2 -- .../planning/ZoneAwareAssignmentPlanner.java | 12 +++--------- 5 files changed, 14 insertions(+), 27 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index a1142796558f4..ef8af6af445fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -142,13 +142,9 @@ private static void copyAssignments( for (Map.Entry<AssignmentPlan.Node, Integer> assignment : nodeAssignments.entrySet()) { AssignmentPlan.Node originalNode = originalNodeById.get(assignment.getKey().id()); dest.assignModelToNode(m, originalNode, assignment.getValue()); - if (m.currentAllocationsByNodeId().containsKey(originalNode.id())) { - // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - long requiredMemory = m.estimateMemoryUsageBytes(m.currentAllocationsByNodeId().get(originalNode.id())); - dest.accountMemory(m, originalNode, requiredMemory); - } + // As the node has all its available memory we need to manually account memory of models with + // current allocations. + dest.accountMemory(m, originalNode); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java index 026b433a8c2d4..98988ffa11055 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AbstractPreserveAllocations.java @@ -68,7 +68,7 @@ Deployment modifyModelPreservingPreviousAssignments(Deployment m) { AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { // As the model/node objects the assignment plan are the modified ones, // they will not match the models/nodes members we have in this class. - // Therefore, we build a lookup table based on the ids so we can merge the plan + // Therefore, we build a lookup table based on the ids, so we can merge the plan // with its preserved allocations. final Map<Tuple<String, String>, Integer> plannedAssignmentsByModelNodeIdPair = new HashMap<>(); for (Deployment m : assignmentPlan.models()) { @@ -80,7 +80,6 @@ AssignmentPlan mergePreservedAllocations(AssignmentPlan assignmentPlan) { AssignmentPlan.Builder mergedPlanBuilder = AssignmentPlan.builder(nodes, deployments); for (Node n : nodes) { - // TODO (#101612) Should the first loop happen in the builder constructor? 
for (Deployment deploymentAllocationsToPreserve : deployments) { // if the model m is already allocated on the node n and I want to preserve this allocation diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java index d9cb0f08a6cd0..123c728587604 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlan.java @@ -401,8 +401,7 @@ public Builder assignModelToNode(Deployment deployment, Node node, int allocatio if (allocations <= 0) { return this; } - if (/*isAlreadyAssigned(deployment, node) == false - &&*/ requiredMemory > remainingNodeMemory.get(node)) { + if (requiredMemory > remainingNodeMemory.get(node)) { throw new IllegalArgumentException( "not enough memory on node [" + node.id() @@ -448,13 +447,14 @@ private static int getCurrentAllocations(Deployment m, Node n) { } public void accountMemory(Deployment m, Node n) { - // TODO (#101612) remove or refactor unused method - long requiredMemory = getDeploymentMemoryRequirement(m, n, getCurrentAllocations(m, n)); - accountMemory(m, n, requiredMemory); + if (m.currentAllocationsByNodeId().containsKey(n.id())) { + int allocations = m.currentAllocationsByNodeId().get(n.id()); + long requiredMemory = m.estimateMemoryUsageBytes(allocations); + accountMemory(m, n, requiredMemory); + } } - public void accountMemory(Deployment m, Node n, long requiredMemory) { - // TODO (#101612) computation of required memory should be done internally + private void accountMemory(Deployment m, Node n, long requiredMemory) { remainingNodeMemory.computeIfPresent(n, (k, v) -> v - requiredMemory); if (remainingNodeMemory.containsKey(n) && remainingNodeMemory.get(n) < 0) { throw new IllegalArgumentException("not enough memory on node [" + n.id() + "] to assign model [" + m.id() + "]"); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java index 8bdc99998a0c2..81696cd20d922 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java @@ -310,8 +310,6 @@ private void unassignOversizedModels(Node n) { private AssignmentPlan toPlan() { AssignmentPlan.Builder builder = AssignmentPlan.builder(nodes, deployments); for (Map.Entry, Integer> assignment : tryAssigningRemainingCores().entrySet()) { - // TODO (#101612) The model should be assigned to the node only when it is possible. This means, that canAssign should be - // integrated into the assignModelToNode. 
if (builder.canAssign(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue())) { builder.assignModelToNode(assignment.getKey().v1(), assignment.getKey().v2(), assignment.getValue()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java index 8c9499ca9e00c..9af2e4cd49b17 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/ZoneAwareAssignmentPlanner.java @@ -183,15 +183,9 @@ private AssignmentPlan swapOriginalModelsInPlan( for (Map.Entry assignment : nodeAssignments.entrySet()) { Node originalNode = originalNodeById.get(assignment.getKey().id()); planBuilder.assignModelToNode(originalDeployment, originalNode, assignment.getValue()); - if (originalDeployment.currentAllocationsByNodeId().containsKey(originalNode.id())) { - // TODO (#101612) requiredMemory should be calculated by the AssignmentPlan.Builder - // As the node has all its available memory we need to manually account memory of models with - // current allocations. - long requiredMemory = originalDeployment.estimateMemoryUsageBytes( - originalDeployment.currentAllocationsByNodeId().get(originalNode.id()) - ); - planBuilder.accountMemory(m, originalNode, requiredMemory); - } + // As the node has all its available memory we need to manually account memory of models with + // current allocations. + planBuilder.accountMemory(originalDeployment, originalNode); } } return planBuilder.build(); From 2a84b62efcf9673a44068afd75290d3c6bed1f97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 12 Jan 2024 11:07:17 +0100 Subject: [PATCH 04/35] Fix: remove wrong assertion from `ESRestTestFeatureService#clusterHasFeature` (#104299) The check is currently broken: the feature check always falls back to historical features, and if the feature is not one of the historical ones, an assertion error is raised. This is wrong: we could be testing for a non-historical feature that the cluster simply does not have.
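
To make the corrected lookup order concrete, here is a minimal sketch; the class and field names below are illustrative stand-ins, not the actual ESRestTestFeatureService API. The cluster state is consulted first, and an unknown feature simply yields false instead of tripping an assertion:

    import java.util.Set;
    import java.util.function.Predicate;

    // Simplified, hypothetical model of the fixed check. The real service
    // builds its historical-features predicate from FeatureSpecification
    // data; a plain Predicate stands in for it here.
    class FeatureCheckSketch {
        private final Set<String> clusterStateFeatures;
        private final Predicate<String> historicalFeaturesPredicate;

        FeatureCheckSketch(Set<String> clusterStateFeatures, Predicate<String> historicalFeaturesPredicate) {
            this.clusterStateFeatures = clusterStateFeatures;
            this.historicalFeaturesPredicate = historicalFeaturesPredicate;
        }

        boolean clusterHasFeature(String featureId) {
            if (clusterStateFeatures.contains(featureId)) {
                return true;
            }
            // No assert that featureId must be a known historical feature:
            // a non-historical feature may legitimately be absent.
            return historicalFeaturesPredicate.test(featureId);
        }
    }

The actual predicate in the diff below has one extra wrinkle omitted here for brevity: when node versions are non-semantic (serverless), it assumes all historical features are supported.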
--- .../test/rest/ESRestTestCase.java | 26 ++++++++++++++----- .../test/rest/ESRestTestFeatureService.java | 26 ++++--------------- 2 files changed, 24 insertions(+), 28 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 20cd1997fd70e..a2806663ff321 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -59,6 +59,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureSpecification; @@ -84,6 +85,7 @@ import org.junit.Before; import java.io.BufferedReader; +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -333,19 +335,28 @@ public void initClient() throws IOException { assert nodesVersions != null; } - protected static TestFeatureService createTestFeatureService( + protected TestFeatureService createTestFeatureService( Map> clusterStateFeatures, Set semanticNodeVersions ) { // Historical features information is unavailable when using legacy test plugins boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null; - var providers = hasHistoricalFeaturesInformation - ? List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()) - : List.of(new RestTestLegacyFeatures()); + + final List featureSpecifications; + if (hasHistoricalFeaturesInformation) { + featureSpecifications = List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()); + } else { + logger.warn( + "This test is running on the legacy test framework; historical features from production code will not be available. " + + "You need to port the test to the new test plugins in order to use historical features from production code. 
" + + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.", + RestTestLegacyFeatures.class.getCanonicalName() + ); + featureSpecifications = List.of(new RestTestLegacyFeatures()); + } return new ESRestTestFeatureService( - hasHistoricalFeaturesInformation, - providers, + featureSpecifications, semanticNodeVersions, ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values()) ); @@ -2343,6 +2354,7 @@ private static class ESRestTestCaseHistoricalFeatures implements FeatureSpecific private static Map historicalFeatures; @Override + @SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file") public Map getHistoricalFeatures() { if (historicalFeatures == null) { Map historicalFeaturesMap = new HashMap<>(); @@ -2353,7 +2365,7 @@ public Map getHistoricalFeatures() { ); } - String[] metadataFiles = metadataPath.split(System.getProperty("path.separator")); + String[] metadataFiles = metadataPath.split(File.pathSeparator); for (String metadataFile : metadataFiles) { try ( InputStream in = Files.newInputStream(PathUtils.get(metadataFile)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 5bb22058e4688..a73c43f4fc46a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -9,11 +9,11 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; -import org.elasticsearch.core.Strings; import org.elasticsearch.features.FeatureData; import org.elasticsearch.features.FeatureSpecification; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.NavigableMap; import java.util.Set; @@ -24,33 +24,17 @@ class ESRestTestFeatureService implements TestFeatureService { private final Set clusterStateFeatures; ESRestTestFeatureService( - boolean hasHistoricalFeaturesInformation, List specs, Collection nodeVersions, Set clusterStateFeatures ) { - var minNodeVersion = nodeVersions.stream().min(Version::compareTo); + var minNodeVersion = nodeVersions.stream().min(Comparator.naturalOrder()); var featureData = FeatureData.createFromSpecifications(specs); var historicalFeatures = featureData.getHistoricalFeatures(); - var allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue(); - var errorMessage = Strings.format( - hasHistoricalFeaturesInformation - ? "Check the feature has been added to the correct FeatureSpecification in the relevant module or, if this is a " - + "legacy feature used only in tests, to a test-only FeatureSpecification such as %s." - : "This test is running on the legacy test framework; historical features from production code will not be available. " - + "You need to port the test to the new test plugins in order to use historical features from production code. 
" - + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as %s.", - RestTestLegacyFeatures.class.getCanonicalName() - ); - this.historicalFeaturesPredicate = minNodeVersion.>map(v -> featureId -> { - assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage); - return hasHistoricalFeature(historicalFeatures, v, featureId); - }).orElse(featureId -> { - // We can safely assume that new non-semantic versions (serverless) support all historical features - assert allHistoricalFeatures.contains(featureId) : Strings.format("Unknown historical feature %s: %s", featureId, errorMessage); - return true; - }); + this.historicalFeaturesPredicate = minNodeVersion.>map( + v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId) + ).orElse(featureId -> true); // We can safely assume that new non-semantic versions (serverless) support all historical features this.clusterStateFeatures = clusterStateFeatures; } From 6187e905564b8a7b1cc346759bdaa09bfa913f9f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 12 Jan 2024 11:41:59 +0100 Subject: [PATCH 05/35] Replace some ActionListener.wrap with more efficient delegateFailureAndWrap in ml module (#103891) There's loads of these spots in the ml codebase. We can save some code, memory and improve readability here by moving to the new delegateFailureAndWrap. --- .../xpack/ml/MachineLearning.java | 169 ++++++++---------- .../TransportDeleteCalendarEventAction.java | 90 +++++----- .../action/TransportDeleteDatafeedAction.java | 22 ++- .../TransportDeleteExpiredDataAction.java | 9 +- .../action/TransportDeleteFilterAction.java | 8 +- .../ml/action/TransportDeleteJobAction.java | 27 ++- .../TransportDeleteTrainedModelAction.java | 23 +-- .../TransportEvaluateDataFrameAction.java | 12 +- ...nsportExplainDataFrameAnalyticsAction.java | 35 ++-- .../TransportFinalizeJobExecutionAction.java | 8 +- .../ml/action/TransportFlushJobAction.java | 17 +- .../ml/action/TransportGetBucketsAction.java | 12 +- .../TransportGetCalendarEventsAction.java | 26 +-- .../TransportGetDataFrameAnalyticsAction.java | 2 +- .../ml/action/TransportGetFiltersAction.java | 2 +- .../action/TransportGetJobsStatsAction.java | 9 +- .../TransportGetMlAutoscalingStats.java | 2 +- .../TransportGetModelSnapshotsAction.java | 2 +- .../TransportGetOverallBucketsAction.java | 40 +++-- ...nsportPreviewDataFrameAnalyticsAction.java | 10 +- .../TransportPreviewDatafeedAction.java | 24 ++- .../TransportPutDataFrameAnalyticsAction.java | 36 ++-- .../TransportPutTrainedModelAction.java | 32 +--- .../extractor/DataFrameDataExtractor.java | 15 +- 24 files changed, 274 insertions(+), 358 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 61835c4838110..09cb8644dba4f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -1963,7 +1963,7 @@ public void prepareForIndicesMigration(ClusterService clusterService, Client cli originClient.execute( SetUpgradeModeAction.INSTANCE, new SetUpgradeModeAction.Request(true), - ActionListener.wrap(r -> listener.onResponse(Collections.singletonMap("already_in_upgrade_mode", false)), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> 
l.onResponse(Collections.singletonMap("already_in_upgrade_mode", false))) ); } @@ -1985,7 +1985,7 @@ public void indicesMigrationComplete( originClient.execute( SetUpgradeModeAction.INSTANCE, new SetUpgradeModeAction.Request(false), - ActionListener.wrap(r -> listener.onResponse(r.isAcknowledged()), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged())) ); } @@ -2086,40 +2086,39 @@ public void cleanUpFeature( } ); - ActionListener afterWaitingForTasks = ActionListener.wrap(listTasksResponse -> { - listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices"); - if (results.values().stream().allMatch(b -> b)) { - if (memoryTracker.get() != null) { - memoryTracker.get() - .awaitAndClear( - ActionListener.wrap( - cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener), - clearFailed -> { - logger.error( - "failed to clear memory tracker cache via machine learning reset feature API", - clearFailed - ); - SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener); - } - ) - ); - return; + // Stop all model deployments + ActionListener pipelineValidation = unsetResetModeListener.delegateFailureAndWrap( + (delegate, listTasksResponse) -> { + listTasksResponse.rethrowFailures("Waiting for indexing requests for .ml-* indices"); + if (results.values().stream().allMatch(b -> b)) { + if (memoryTracker.get() != null) { + memoryTracker.get() + .awaitAndClear( + ActionListener.wrap( + cacheCleared -> SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate), + clearFailed -> { + logger.error( + "failed to clear memory tracker cache via machine learning reset feature API", + clearFailed + ); + SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate); + } + ) + ); + return; + } + // Call into the original listener to clean up the indices and then clear ml memory cache + SystemIndexPlugin.super.cleanUpFeature(clusterService, client, delegate); + } else { + final List failedComponents = results.entrySet() + .stream() + .filter(result -> result.getValue() == false) + .map(Map.Entry::getKey) + .toList(); + delegate.onFailure(new RuntimeException("Some machine learning components failed to reset: " + failedComponents)); } - // Call into the original listener to clean up the indices and then clear ml memory cache - SystemIndexPlugin.super.cleanUpFeature(clusterService, client, unsetResetModeListener); - } else { - final List failedComponents = results.entrySet() - .stream() - .filter(result -> result.getValue() == false) - .map(Map.Entry::getKey) - .toList(); - unsetResetModeListener.onFailure( - new RuntimeException("Some machine learning components failed to reset: " + failedComponents) - ); } - }, unsetResetModeListener::onFailure); - - ActionListener afterDataframesStopped = ActionListener.wrap(dataFrameStopResponse -> { + ).delegateFailureAndWrap((delegate, dataFrameStopResponse) -> { // Handle the response results.put("data_frame/analytics", dataFrameStopResponse.isStopped()); if (results.values().stream().allMatch(b -> b)) { @@ -2129,7 +2128,7 @@ public void cleanUpFeature( // This waits for all xpack actions including: allocations, anomaly detections, analytics .setActions("xpack/ml/*") .setWaitForCompletion(true) - .execute(ActionListener.wrap(listMlTasks -> { + .execute(delegate.delegateFailureAndWrap((l, listMlTasks) -> { listMlTasks.rethrowFailures("Waiting for machine learning tasks"); client.admin() .cluster() @@ -2138,48 
+2137,37 @@ public void cleanUpFeature( .setDetailed(true) .setWaitForCompletion(true) .setDescriptions("*.ml-*") - .execute(afterWaitingForTasks); - }, unsetResetModeListener::onFailure)); + .execute(l); + })); } else { final List failedComponents = results.entrySet() .stream() .filter(result -> result.getValue() == false) .map(Map.Entry::getKey) .toList(); - unsetResetModeListener.onFailure( - new RuntimeException("Some machine learning components failed to reset: " + failedComponents) - ); + delegate.onFailure(new RuntimeException("Some machine learning components failed to reset: " + failedComponents)); } - }, unsetResetModeListener::onFailure); - - ActionListener afterAnomalyDetectionClosed = ActionListener.wrap(closeJobResponse -> { + }).delegateFailureAndWrap((delegate, closeJobResponse) -> { // Handle the response results.put("anomaly_detectors", closeJobResponse.isClosed()); if (machineLearningExtension.get().isDataFrameAnalyticsEnabled() == false) { - afterDataframesStopped.onResponse(new StopDataFrameAnalyticsAction.Response(true)); + delegate.onResponse(new StopDataFrameAnalyticsAction.Response(true)); return; } // Stop data frame analytics StopDataFrameAnalyticsAction.Request stopDataFramesReq = new StopDataFrameAnalyticsAction.Request("_all").setAllowNoMatch(true); - client.execute( - StopDataFrameAnalyticsAction.INSTANCE, - stopDataFramesReq, - ActionListener.wrap(afterDataframesStopped::onResponse, failure -> { - logger.warn( - "failed stopping data frame analytics jobs for machine learning feature reset. Attempting with force=true", - failure - ); - client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), afterDataframesStopped); - }) - ); - }, unsetResetModeListener::onFailure); - - // Close anomaly detection jobs - ActionListener afterDataFeedsStopped = ActionListener.wrap(datafeedResponse -> { + client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq, ActionListener.wrap(delegate::onResponse, failure -> { + logger.warn( + "failed stopping data frame analytics jobs for machine learning feature reset. Attempting with force=true", + failure + ); + client.execute(StopDataFrameAnalyticsAction.INSTANCE, stopDataFramesReq.setForce(true), delegate); + })); + }).delegateFailureAndWrap((delegate, datafeedResponse) -> { // Handle the response results.put("datafeeds", datafeedResponse.isStopped()); if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - afterAnomalyDetectionClosed.onResponse(new CloseJobAction.Response(true)); + delegate.onResponse(new CloseJobAction.Response(true)); return; } CloseJobAction.Request closeJobsRequest = new CloseJobAction.Request().setAllowNoMatch(true).setJobId("_all"); @@ -2187,65 +2175,48 @@ public void cleanUpFeature( client.execute( KillProcessAction.INSTANCE, new KillProcessAction.Request("*"), - ActionListener.wrap( + delegate.delegateFailureAndWrap( // If successful, close and wait for jobs - success -> client.execute( + (l, success) -> client.execute( CloseJobAction.INSTANCE, closeJobsRequest, - ActionListener.wrap(afterAnomalyDetectionClosed::onResponse, failure -> { + ActionListener.wrap(l::onResponse, failure -> { logger.warn( "failed closing anomaly jobs for machine learning feature reset. 
Attempting with force=true", failure ); - client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), afterAnomalyDetectionClosed); + client.execute(CloseJobAction.INSTANCE, closeJobsRequest.setForce(true), l); }) - ), - unsetResetModeListener::onFailure + ) ) ); - }, unsetResetModeListener::onFailure); - - // Stop data feeds - ActionListener cancelSnapshotUpgradesListener = ActionListener.wrap( - cancelUpgradesResponse -> { - if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - afterDataFeedsStopped.onResponse(new StopDatafeedAction.Response(true)); - return; - } - StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true); - client.execute( - StopDatafeedAction.INSTANCE, - stopDatafeedsReq, - ActionListener.wrap(afterDataFeedsStopped::onResponse, failure -> { - logger.warn("failed stopping datafeeds for machine learning feature reset. Attempting with force=true", failure); - client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), afterDataFeedsStopped); - }) - ); - }, - unsetResetModeListener::onFailure - ); - - // Cancel model snapshot upgrades - ActionListener stopDeploymentsListener = ActionListener.wrap(acknowledgedResponse -> { + }).delegateFailureAndWrap((delegate, cancelUpgradesResponse) -> { + if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { + delegate.onResponse(new StopDatafeedAction.Response(true)); + return; + } + StopDatafeedAction.Request stopDatafeedsReq = new StopDatafeedAction.Request("_all").setAllowNoMatch(true); + client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq, ActionListener.wrap(delegate::onResponse, failure -> { + logger.warn("failed stopping datafeeds for machine learning feature reset. 
Attempting with force=true", failure); + client.execute(StopDatafeedAction.INSTANCE, stopDatafeedsReq.setForce(true), delegate); + })); + }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> { if (machineLearningExtension.get().isAnomalyDetectionEnabled() == false) { - cancelSnapshotUpgradesListener.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true)); + delegate.onResponse(new CancelJobModelSnapshotUpgradeAction.Response(true)); return; } CancelJobModelSnapshotUpgradeAction.Request cancelSnapshotUpgradesReq = new CancelJobModelSnapshotUpgradeAction.Request( "_all", "_all" ); - client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, cancelSnapshotUpgradesListener); - }, unsetResetModeListener::onFailure); - - // Stop all model deployments - ActionListener pipelineValidation = ActionListener.wrap(acknowledgedResponse -> { + client.execute(CancelJobModelSnapshotUpgradeAction.INSTANCE, cancelSnapshotUpgradesReq, delegate); + }).delegateFailureAndWrap((delegate, acknowledgedResponse) -> { if (trainedModelAllocationClusterServiceSetOnce.get() == null || machineLearningExtension.get().isNlpEnabled() == false) { - stopDeploymentsListener.onResponse(AcknowledgedResponse.TRUE); + delegate.onResponse(AcknowledgedResponse.TRUE); return; } - trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(stopDeploymentsListener); - }, unsetResetModeListener::onFailure); + trainedModelAllocationClusterServiceSetOnce.get().removeAllModelAssignments(delegate); + }); // validate no pipelines are using machine learning models ActionListener afterResetModeSet = ActionListener.wrap(acknowledgedResponse -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 60993e12a2088..e4c73106852ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -65,50 +65,54 @@ public TransportDeleteCalendarEventAction( protected void doExecute(Task task, DeleteCalendarEventAction.Request request, ActionListener listener) { final String eventId = request.getEventId(); - ActionListener calendarListener = ActionListener.wrap(calendar -> { - GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId); - executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetAction.TYPE, getRequest, ActionListener.wrap(getResponse -> { - if (getResponse.isExists() == false) { - listener.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]")); - return; - } - - Map source = getResponse.getSourceAsMap(); - String calendarId = (String) source.get(Calendar.ID.getPreferredName()); - if (calendarId == null) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName() - ) - ); - return; - } - - if (calendarId.equals(request.getCalendarId()) == false) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Event [" - + eventId - + "] has " - + Calendar.ID.getPreferredName() - + " [" - + calendarId - + "] which does not match the request " - + Calendar.ID.getPreferredName() - + " [" - + request.getCalendarId() - + "]" - ) - ); - return; - } - - deleteEvent(eventId, calendar, listener); - }, listener::onFailure)); 
- }, listener::onFailure); - // Get the calendar first so we check the calendar exists before checking the event exists - jobResultsProvider.calendar(request.getCalendarId(), calendarListener); + jobResultsProvider.calendar(request.getCalendarId(), listener.delegateFailureAndWrap((l, calendar) -> { + GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportGetAction.TYPE, + getRequest, + l.delegateFailureAndWrap((delegate, getResponse) -> { + if (getResponse.isExists() == false) { + delegate.onFailure(new ResourceNotFoundException("No event with id [" + eventId + "]")); + return; + } + + Map source = getResponse.getSourceAsMap(); + String calendarId = (String) source.get(Calendar.ID.getPreferredName()); + if (calendarId == null) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Event [" + eventId + "] does not have a valid " + Calendar.ID.getPreferredName() + ) + ); + return; + } + + if (calendarId.equals(request.getCalendarId()) == false) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Event [" + + eventId + + "] has " + + Calendar.ID.getPreferredName() + + " [" + + calendarId + + "] which does not match the request " + + Calendar.ID.getPreferredName() + + " [" + + request.getCalendarId() + + "]" + ) + ); + return; + } + + deleteEvent(eventId, calendar, delegate); + }) + ); + })); } private void deleteEvent(String eventId, Calendar calendar, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java index 64ad51fc0f722..49c6021a6ed8b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java @@ -84,19 +84,17 @@ private void forceDeleteDatafeed( ClusterState state, ActionListener listener ) { - ActionListener finalListener = ActionListener.wrap( - // use clusterService.state() here so that the updated state without the task is available - response -> datafeedManager.deleteDatafeed(request, clusterService.state(), listener), - listener::onFailure - ); - - ActionListener isolateDatafeedHandler = ActionListener.wrap( - response -> removeDatafeedTask(request, state, finalListener), - listener::onFailure - ); - IsolateDatafeedAction.Request isolateDatafeedRequest = new IsolateDatafeedAction.Request(request.getDatafeedId()); - executeAsyncWithOrigin(client, ML_ORIGIN, IsolateDatafeedAction.INSTANCE, isolateDatafeedRequest, isolateDatafeedHandler); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + IsolateDatafeedAction.INSTANCE, + isolateDatafeedRequest, + listener.delegateFailureAndWrap( + // use clusterService.state() here so that the updated state without the task is available + (l, response) -> datafeedManager.deleteDatafeed(request, clusterService.state(), l) + ).delegateFailureAndWrap((l, response) -> removeDatafeedTask(request, state, l)) + ); } private void removeDatafeedTask(DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index b28d37022e171..ad85f22873cce 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -146,16 +146,15 @@ protected void doExecute( false, true, null, - ActionListener.wrap( - jobBuilders -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - .execute(ActionRunnable.wrap(listener, l -> { + listener.delegateFailureAndWrap( + (delegate, jobBuilders) -> threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + .execute(ActionRunnable.wrap(delegate, l -> { List jobs = jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()); String[] jobIds = jobs.stream().map(Job::getId).toArray(String[]::new); request.setExpandedJobIds(jobIds); List dataRemovers = createDataRemovers(jobs, taskId, anomalyDetectionAuditor); deleteExpiredData(request, dataRemovers, l, isTimedOutSupplier); - })), - listener::onFailure + })) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 45bbd6256c205..ceae2a680feb0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -63,16 +63,16 @@ public TransportDeleteFilterAction( @Override protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); - jobConfigProvider.findJobsWithCustomRules(ActionListener.wrap(jobs -> { + jobConfigProvider.findJobsWithCustomRules(listener.delegateFailureAndWrap((delegate, jobs) -> { List currentlyUsedBy = findJobsUsingFilter(jobs, filterId); if (currentlyUsedBy.isEmpty() == false) { - listener.onFailure( + delegate.onFailure( ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.FILTER_CANNOT_DELETE, filterId, currentlyUsedBy)) ); } else { - deleteFilter(filterId, listener); + deleteFilter(filterId, delegate); } - }, listener::onFailure)); + })); } private static List findJobsUsingFilter(List jobs, String filterId) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index f3b0fcd669637..f694e85144b48 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -167,28 +167,23 @@ protected void masterOperation( } ); - ActionListener markAsDeletingListener = finalListener.delegateFailureAndWrap((delegate, response) -> { - if (request.isForce()) { - forceDeleteJob(parentTaskClient, request, state, delegate); - } else { - normalDeleteJob(parentTaskClient, request, state, delegate); + ActionListener datafeedDeleteListener = finalListener.delegateFailureAndWrap( + (delegate, response) -> { + if (request.isForce()) { + forceDeleteJob(parentTaskClient, request, state, delegate); + } else { + normalDeleteJob(parentTaskClient, request, state, delegate); + } } - }); - - ActionListener datafeedDeleteListener = ActionListener.wrap(response -> { + ).delegateFailureAndWrap((delegate, response) -> { auditor.info(request.getJobId(), 
Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); cancelResetTaskIfExists( request.getJobId(), - ActionListener.wrap( - r -> jobConfigProvider.updateJobBlockReason( - request.getJobId(), - new Blocked(Blocked.Reason.DELETE, taskId), - markAsDeletingListener - ), - finalListener::onFailure + delegate.delegateFailureAndWrap( + (l, r) -> jobConfigProvider.updateJobBlockReason(request.getJobId(), new Blocked(Blocked.Reason.DELETE, taskId), l) ) ); - }, finalListener::onFailure); + }); ActionListener jobExistsListener = ActionListener.wrap( response -> deleteDatafeedIfNecessary(request, datafeedDeleteListener), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index d19871d0e1b2f..7f2d0e47975e3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -110,14 +110,8 @@ protected void masterOperation( ) { logger.debug(() -> format("[%s] Request to delete trained model%s", request.getId(), request.isForce() ? " (force)" : "")); - ActionListener performDeletion = ActionListener.wrap( - ignored -> deleteModel(request, state, listener), - listener::onFailure - ); - String id = request.getId(); - - cancelDownloadTask(client, id, performDeletion, request.timeout()); + cancelDownloadTask(client, id, listener.delegateFailureAndWrap((l, ignored) -> deleteModel(request, state, l)), request.timeout()); } // package-private for testing @@ -218,10 +212,7 @@ private void deleteModel(DeleteTrainedModelAction.Request request, ClusterState if (request.isForce()) { forceStopDeployment( request.getId(), - ActionListener.wrap( - stopDeploymentResponse -> deleteAliasesAndModel(request, modelAliases, listener), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, stopDeploymentResponse) -> deleteAliasesAndModel(request, modelAliases, l)) ); } else { listener.onFailure( @@ -250,13 +241,11 @@ private void deleteAliasesAndModel( ) { logger.debug(() -> "[" + request.getId() + "] Deleting model"); - ActionListener nameDeletionListener = ActionListener.wrap( - ack -> trainedModelProvider.deleteTrainedModel(request.getId(), ActionListener.wrap(r -> { + ActionListener nameDeletionListener = listener.delegateFailureAndWrap( + (delegate, ack) -> trainedModelProvider.deleteTrainedModel(request.getId(), delegate.delegateFailureAndWrap((l, r) -> { auditor.info(request.getId(), "trained model deleted"); - listener.onResponse(AcknowledgedResponse.TRUE); - }, listener::onFailure)), - - listener::onFailure + l.onResponse(AcknowledgedResponse.TRUE); + })) ); // No reason to update cluster state, simply delete the model diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 3865858f527b4..61db7f683f0f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -82,13 +82,11 @@ protected void doExecute( ActionListener listener ) { TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - ActionListener> 
resultsListener = ActionListener.wrap(unused -> { - EvaluateDataFrameAction.Response response = new EvaluateDataFrameAction.Response( - request.getEvaluation().getName(), - request.getEvaluation().getResults() - ); - listener.onResponse(response); - }, listener::onFailure); + ActionListener> resultsListener = listener.delegateFailureAndWrap( + (delegate, unused) -> delegate.onResponse( + new EvaluateDataFrameAction.Response(request.getEvaluation().getName(), request.getEvaluation().getResults()) + ) + ); // Create an immutable collection of parameters to be used by evaluation metrics. EvaluationParameters parameters = new EvaluationParameters(maxBuckets.get()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java index d19b67b52afe1..b1f5eda679006 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java @@ -147,9 +147,8 @@ private void explain( ).build(); extractedFieldsDetectorFactory.createFromSource( config, - ActionListener.wrap( - extractedFieldsDetector -> explain(parentTaskId, config, extractedFieldsDetector, listener), - listener::onFailure + listener.delegateFailureAndWrap( + (l, extractedFieldsDetector) -> explain(parentTaskId, config, extractedFieldsDetector, l) ) ); }); @@ -160,14 +159,8 @@ private void explain( ); extractedFieldsDetectorFactory.createFromSource( request.getConfig(), - ActionListener.wrap( - extractedFieldsDetector -> explain( - parentTaskId, - request.getConfig(), - extractedFieldsDetector, - responseHeaderPreservingListener - ), - responseHeaderPreservingListener::onFailure + responseHeaderPreservingListener.delegateFailureAndWrap( + (l, extractedFieldsDetector) -> explain(parentTaskId, request.getConfig(), extractedFieldsDetector, l) ) ); } @@ -189,13 +182,14 @@ private void explain( ); return; } - - ActionListener memoryEstimationListener = ActionListener.wrap( - memoryEstimation -> listener.onResponse(new ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation)), - listener::onFailure + estimateMemoryUsage( + parentTaskId, + config, + fieldExtraction.v1(), + listener.delegateFailureAndWrap( + (l, memoryEstimation) -> l.onResponse(new ExplainDataFrameAnalyticsAction.Response(fieldExtraction.v2(), memoryEstimation)) + ) ); - - estimateMemoryUsage(parentTaskId, config, fieldExtraction.v1(), memoryEstimationListener); } /** @@ -220,11 +214,8 @@ private void estimateMemoryUsage( estimateMemoryTaskId, config, extractorFactory, - ActionListener.wrap( - result -> listener.onResponse( - new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk()) - ), - listener::onFailure + listener.delegateFailureAndWrap( + (l, result) -> l.onResponse(new MemoryEstimation(result.getExpectedMemoryWithoutDisk(), result.getExpectedMemoryWithDisk())) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java index 6d183501d2043..5aed29fd6d152 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java @@ -96,15 +96,15 @@ protected void masterOperation( ML_ORIGIN, TransportUpdateAction.TYPE, updateRequest, - ActionListener.wrap(updateResponse -> chainedListener.onResponse(null), chainedListener::onFailure) + chainedListener.delegateFailureAndWrap((l, updateResponse) -> l.onResponse(null)) ); }); } - voidChainTaskExecutor.execute(ActionListener.wrap(aVoids -> { + voidChainTaskExecutor.execute(listener.delegateFailureAndWrap((l, aVoids) -> { logger.debug("finalized job [{}]", jobIdString); - listener.onResponse(AcknowledgedResponse.TRUE); - }, listener::onFailure)); + l.onResponse(AcknowledgedResponse.TRUE); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java index a5fe3ad67ca06..17f1459984736 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java @@ -66,10 +66,17 @@ protected void taskOperation( timeRangeBuilder.endTime(request.getEnd()); } paramsBuilder.forTimeRange(timeRangeBuilder.build()); - processManager.flushJob(task, paramsBuilder.build(), ActionListener.wrap(flushAcknowledgement -> { - listener.onResponse( - new FlushJobAction.Response(true, flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd()) - ); - }, listener::onFailure)); + processManager.flushJob( + task, + paramsBuilder.build(), + listener.delegateFailureAndWrap( + (l, flushAcknowledgement) -> l.onResponse( + new FlushJobAction.Response( + true, + flushAcknowledgement == null ? 
null : flushAcknowledgement.getLastFinalizedBucketEnd() + ) + ) + ) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java index e42c2b5d87f9e..58de04146aa52 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java @@ -41,7 +41,7 @@ public TransportGetBucketsAction( @Override protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) { - jobManager.jobExists(request.getJobId(), null, ActionListener.wrap(ok -> { + jobManager.jobExists(request.getJobId(), null, listener.delegateFailureAndWrap((delegate, ok) -> { BucketsQueryBuilder query = new BucketsQueryBuilder().expand(request.isExpand()) .includeInterim(request.isExcludeInterim() == false) .start(request.getStart()) @@ -62,14 +62,10 @@ protected void doExecute(Task task, GetBucketsAction.Request request, ActionList jobResultsProvider.buckets( request.getJobId(), query, - q -> listener.onResponse(new GetBucketsAction.Response(q)), - listener::onFailure, + q -> delegate.onResponse(new GetBucketsAction.Response(q)), + delegate::onFailure, client ); - - }, - listener::onFailure - - )); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index 3e35429d352c2..89527d2cd12d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -58,16 +58,15 @@ protected void doExecute( ActionListener listener ) { final String[] calendarId = Strings.splitStringByCommaToArray(request.getCalendarId()); - ActionListener calendarExistsListener = ActionListener.wrap(r -> { + checkCalendarExists(calendarId, listener.delegateFailureAndWrap((outerDelegate, r) -> { ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(request.getStart()) .end(request.getEnd()) .from(request.getPageParams().getFrom()) .size(request.getPageParams().getSize()) .calendarIds(calendarId); - ActionListener> eventsListener = ActionListener.wrap( - events -> listener.onResponse(new GetCalendarEventsAction.Response(events)), - listener::onFailure + ActionListener> eventsListener = outerDelegate.delegateFailureAndWrap( + (l, events) -> l.onResponse(new GetCalendarEventsAction.Response(events)) ); if (request.getJobId() != null) { @@ -78,25 +77,18 @@ protected void doExecute( }, jobNotFound -> { // is the request Id a group? 
- jobConfigProvider.groupExists(request.getJobId(), ActionListener.wrap(groupExists -> { + jobConfigProvider.groupExists(request.getJobId(), eventsListener.delegateFailureAndWrap((delegate, groupExists) -> { if (groupExists) { - jobResultsProvider.scheduledEventsForJob( - null, - Collections.singletonList(request.getJobId()), - query, - eventsListener - ); + jobResultsProvider.scheduledEventsForJob(null, Collections.singletonList(request.getJobId()), query, delegate); } else { - listener.onFailure(ExceptionsHelper.missingJobException(request.getJobId())); + delegate.onFailure(ExceptionsHelper.missingJobException(request.getJobId())); } - }, listener::onFailure)); + })); })); } else { jobResultsProvider.scheduledEvents(query, eventsListener); } - }, listener::onFailure); - - checkCalendarExists(calendarId, calendarExistsListener); + })); } private void checkCalendarExists(String[] calendarId, ActionListener listener) { @@ -107,7 +99,7 @@ private void checkCalendarExists(String[] calendarId, ActionListener li jobResultsProvider.calendars( CalendarQueryBuilder.builder().calendarIdTokens(calendarId), - ActionListener.wrap(c -> listener.onResponse(true), listener::onFailure) + listener.delegateFailureAndWrap((l, c) -> l.onResponse(true)) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java index bec0b86e77edb..eecc5999f842b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDataFrameAnalyticsAction.java @@ -83,7 +83,7 @@ protected void doExecute( searchResources( request, new TaskId(clusterService.localNode().getId(), task.getId()), - ActionListener.wrap(queryPage -> listener.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage)), listener::onFailure) + listener.delegateFailureAndWrap((l, queryPage) -> l.onResponse(new GetDataFrameAnalyticsAction.Response(queryPage))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 4d307546fda95..0ca5c706e5b8c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -54,7 +54,7 @@ protected void doExecute(Task task, GetFiltersAction.Request request, ActionList searchResources( request, new TaskId(clusterService.localNode().getId(), task.getId()), - ActionListener.wrap(filters -> listener.onResponse(new GetFiltersAction.Response(filters)), listener::onFailure) + listener.delegateFailureAndWrap((l, filters) -> l.onResponse(new GetFiltersAction.Response(filters))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 666e6bf478429..4ae6512fcaff4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -108,14 +108,13 @@ protected void doExecute(Task task, GetJobsStatsAction.Request 
request, ActionLi tasks, true, parentTaskId, - ActionListener.wrap(expandedIds -> { + finalListener.delegateFailureAndWrap((delegate, expandedIds) -> { request.setExpandedJobsIds(new ArrayList<>(expandedIds)); - ActionListener jobStatsListener = ActionListener.wrap( - response -> gatherStatsForClosedJobs(request, response, parentTaskId, finalListener), - finalListener::onFailure + ActionListener jobStatsListener = delegate.delegateFailureAndWrap( + (l, response) -> gatherStatsForClosedJobs(request, response, parentTaskId, l) ); super.doExecute(task, request, jobStatsListener); - }, finalListener::onFailure) + }) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java index 78ef2f815c0fb..ab5949412927c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetMlAutoscalingStats.java @@ -78,7 +78,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A clusterService.getClusterSettings(), mlMemoryTracker, settings, - ActionListener.wrap(autoscalingResources -> listener.onResponse(new Response(autoscalingResources)), listener::onFailure) + listener.delegateFailureAndWrap((l, autoscalingResources) -> l.onResponse(new Response(autoscalingResources))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java index a5cc23544fbc9..67838fcfa26df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java @@ -82,7 +82,7 @@ protected void doExecute( jobManager.jobExists( request.getJobId(), parentTaskId, - ActionListener.wrap(ok -> getModelSnapshots(request, parentTaskId, listener), listener::onFailure) + listener.delegateFailureAndWrap((l, ok) -> getModelSnapshots(request, parentTaskId, l)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index d9dfd0fb23eeb..38c7f85b189f2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -120,22 +120,6 @@ private void getOverallBuckets( ) { JobsContext jobsContext = JobsContext.build(jobs, request); - ActionListener> overallBucketsListener = ActionListener.wrap(overallBuckets -> { - listener.onResponse( - new GetOverallBucketsAction.Response(new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD)) - ); - }, listener::onFailure); - - ActionListener chunkedBucketSearcherListener = ActionListener.wrap(searcher -> { - if (searcher == null) { - listener.onResponse( - new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD)) - ); - return; - } - searcher.searchAndComputeOverallBuckets(overallBucketsListener); - }, listener::onFailure); - OverallBucketsProvider overallBucketsProvider = new 
OverallBucketsProvider( jobsContext.maxBucketSpan, request.getTopN(), @@ -144,7 +128,29 @@ private void getOverallBuckets( OverallBucketsProcessor overallBucketsProcessor = requiresAggregation(request, jobsContext.maxBucketSpan) ? new OverallBucketsAggregator(request.getBucketSpan()) : new OverallBucketsCollector(); - initChunkedBucketSearcher(request, jobsContext, overallBucketsProvider, overallBucketsProcessor, chunkedBucketSearcherListener); + initChunkedBucketSearcher( + request, + jobsContext, + overallBucketsProvider, + overallBucketsProcessor, + listener.delegateFailureAndWrap((l, searcher) -> { + if (searcher == null) { + l.onResponse( + new GetOverallBucketsAction.Response(new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD)) + ); + return; + } + searcher.searchAndComputeOverallBuckets( + l.delegateFailureAndWrap( + (ll, overallBuckets) -> ll.onResponse( + new GetOverallBucketsAction.Response( + new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD) + ) + ) + ) + ); + }) + ); } private static boolean requiresAggregation(GetOverallBucketsAction.Request request, TimeValue maxBucketSpan) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java index 9bf18671e7c11..8fe7c3686dcb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java @@ -112,18 +112,18 @@ void preview(Task task, DataFrameAnalyticsConfig config, ActionListener { + extractedFieldsDetectorFactory.createFromSource(config, listener.delegateFailureAndWrap((delegate, extractedFieldsDetector) -> { DataFrameDataExtractor extractor = DataFrameDataExtractorFactory.createForSourceIndices( client, parentTaskId.toString(), config, extractedFieldsDetector.detect().v1() ).newExtractor(false); - extractor.preview(ActionListener.wrap(rows -> { + extractor.preview(delegate.delegateFailureAndWrap((l, rows) -> { List fieldNames = extractor.getFieldNames(); - listener.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); - }, listener::onFailure)); - }, listener::onFailure)); + l.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); + })); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 5ceb34bfc0510..d567a823c62cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilters; @@ -49,7 +48,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Date; -import java.util.Map; import 
java.util.Optional; import java.util.stream.Collectors; @@ -101,27 +99,26 @@ public TransportPreviewDatafeedAction( @Override protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) { TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - ActionListener datafeedConfigActionListener = ActionListener.wrap(datafeedConfig -> { + ActionListener datafeedConfigActionListener = listener.delegateFailureAndWrap((delegate, datafeedConfig) -> { if (request.getJobConfig() != null) { - previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, listener); + previewDatafeed(parentTaskId, datafeedConfig, request.getJobConfig().build(new Date()), request, delegate); return; } jobConfigProvider.getJob( datafeedConfig.getJobId(), parentTaskId, - ActionListener.wrap( - jobBuilder -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, listener), - listener::onFailure + delegate.delegateFailureAndWrap( + (l, jobBuilder) -> previewDatafeed(parentTaskId, datafeedConfig, jobBuilder.build(), request, l) ) ); - }, listener::onFailure); + }); if (request.getDatafeedConfig() != null) { datafeedConfigActionListener.onResponse(request.getDatafeedConfig()); } else { datafeedConfigProvider.getDatafeedConfig( request.getDatafeedId(), parentTaskId, - ActionListener.wrap(builder -> datafeedConfigActionListener.onResponse(builder.build()), listener::onFailure) + datafeedConfigActionListener.delegateFailureAndWrap((l, builder) -> l.onResponse(builder.build())) ); } } @@ -209,10 +206,11 @@ private void isDateNanos(DatafeedConfig datafeed, String timeField, ActionListen client, TransportFieldCapabilitiesAction.TYPE, fieldCapabilitiesRequest, - ActionListener.wrap(fieldCapsResponse -> { - Map timeFieldCaps = fieldCapsResponse.getField(timeField); - listener.onResponse(timeFieldCaps.containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)); - }, listener::onFailure) + listener.delegateFailureAndWrap( + (l, fieldCapsResponse) -> l.onResponse( + fieldCapsResponse.getField(timeField).containsKey(DateFieldMapper.DATE_NANOS_CONTENT_TYPE) + ) + ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java index d73b942e766cf..77bcc9dbcf7d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDataFrameAnalyticsAction.java @@ -136,18 +136,13 @@ protected void masterOperation( final DataFrameAnalyticsConfig config = request.getConfig(); - ActionListener sourceDestValidationListener = ActionListener.wrap( - aBoolean -> putValidatedConfig(config, request.masterNodeTimeout(), listener), - listener::onFailure - ); - sourceDestValidator.validate( clusterService.state(), config.getSource().getIndex(), config.getDest().getIndex(), null, SourceDestValidations.ALL_VALIDATIONS, - sourceDestValidationListener + listener.delegateFailureAndWrap((l, aBoolean) -> putValidatedConfig(config, request.masterNodeTimeout(), l)) ); } @@ -191,22 +186,20 @@ private void putValidatedConfig( } privRequest.indexPrivileges(indicesPrivileges); - ActionListener privResponseListener = ActionListener.wrap( - r -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener), - listener::onFailure + 
client.execute( + HasPrivilegesAction.INSTANCE, + privRequest, + listener.delegateFailureAndWrap( + (l, r) -> handlePrivsResponse(username, preparedForPutConfig, r, masterNodeTimeout, listener) + ) ); - - client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); }); } else { updateDocMappingAndPutConfig( preparedForPutConfig, threadPool.getThreadContext().getHeaders(), masterNodeTimeout, - ActionListener.wrap( - finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig))) ); } } @@ -223,10 +216,7 @@ private void handlePrivsResponse( memoryCappedConfig, threadPool.getThreadContext().getHeaders(), masterNodeTimeout, - ActionListener.wrap( - finalConfig -> listener.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig)), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, finalConfig) -> l.onResponse(new PutDataFrameAnalyticsAction.Response(finalConfig))) ); } else { XContentBuilder builder = JsonXContent.contentBuilder(); @@ -254,13 +244,13 @@ private void updateDocMappingAndPutConfig( TimeValue masterNodeTimeout, ActionListener listener ) { - ActionListener auditingListener = ActionListener.wrap(finalConfig -> { + ActionListener auditingListener = listener.delegateFailureAndWrap((delegate, finalConfig) -> { auditor.info( finalConfig.getId(), Messages.getMessage(Messages.DATA_FRAME_ANALYTICS_AUDIT_CREATED, finalConfig.getAnalysis().getWriteableName()) ); - listener.onResponse(finalConfig); - }, listener::onFailure); + delegate.onResponse(finalConfig); + }); ClusterState clusterState = clusterService.state(); if (clusterState == null) { @@ -274,7 +264,7 @@ private void updateDocMappingAndPutConfig( client, clusterState, masterNodeTimeout, - ActionListener.wrap(unused -> configProvider.put(config, headers, masterNodeTimeout, auditingListener), listener::onFailure), + auditingListener.delegateFailureAndWrap((l, unused) -> configProvider.put(config, headers, masterNodeTimeout, l)), MlConfigIndex.CONFIG_INDEX_MAPPINGS_VERSION ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index d6e52b6de1fd4..c89b5005444b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -237,38 +237,24 @@ protected void masterOperation( return; } - ActionListener finalResponseAction = ActionListener.wrap( - (configToReturn) -> finalResponseListener.onResponse(new Response(configToReturn)), - finalResponseListener::onFailure - ); - - ActionListener verifyClusterAndModelArchitectures = ActionListener.wrap( - (configToReturn) -> verifyMlNodesAndModelArchitectures(configToReturn, client, threadPool, finalResponseAction), - finalResponseListener::onFailure - ); - - ActionListener finishedStoringListener = ActionListener.wrap(bool -> { + var isPackageModel = config.isPackagedModel(); + ActionListener checkStorageIndexSizeListener = finalResponseListener.delegateFailureAndWrap((delegate, bool) -> { TrainedModelConfig configToReturn = trainedModelConfig.clearDefinition().build(); if (modelPackageConfigHolder.get() != null) { triggerModelFetchIfNecessary( 
configToReturn.getModelId(), modelPackageConfigHolder.get(), request.isWaitForCompletion(), - ActionListener.wrap( - downloadTriggered -> verifyClusterAndModelArchitectures.onResponse(configToReturn), - finalResponseListener::onFailure - ) + delegate.delegateFailureAndWrap((l, cfg) -> l.onResponse(new Response(cfg))) + .delegateFailureAndWrap( + (l, cfg) -> verifyMlNodesAndModelArchitectures(cfg, client, threadPool, l) + ) + .delegateFailureAndWrap((l, downloadTriggered) -> l.onResponse(configToReturn)) ); } else { - finalResponseListener.onResponse(new PutTrainedModelAction.Response(configToReturn)); + delegate.onResponse(new PutTrainedModelAction.Response(configToReturn)); } - }, finalResponseListener::onFailure); - - var isPackageModel = config.isPackagedModel(); - ActionListener checkStorageIndexSizeListener = ActionListener.wrap( - r -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), finishedStoringListener, isPackageModel), - finalResponseListener::onFailure - ); + }).delegateFailureAndWrap((l, r) -> trainedModelProvider.storeTrainedModel(trainedModelConfig.build(), l, isPackageModel)); ActionListener tagsModelIdCheckListener = ActionListener.wrap(r -> { if (TrainedModelType.PYTORCH.equals(trainedModelConfig.getModelType())) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 6c3fb28fe2c83..4119b23747fcb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -148,9 +148,9 @@ public void preview(ActionListener> listener) { client, TransportSearchAction.TYPE, searchRequestBuilder.request(), - ActionListener.wrap(searchResponse -> { + listener.delegateFailureAndWrap((delegate, searchResponse) -> { if (searchResponse.getHits().getHits().length == 0) { - listener.onResponse(Collections.emptyList()); + delegate.onResponse(Collections.emptyList()); return; } @@ -160,8 +160,8 @@ public void preview(ActionListener> listener) { String[] extractedValues = extractValues(hit); rows.add(extractedValues == null ? 
new Row(null, hit, true) : new Row(extractedValues, hit, false)); } - listener.onResponse(rows); - }, listener::onFailure) + delegate.onResponse(rows); + }) ); } @@ -393,11 +393,8 @@ public void collectDataSummaryAsync(ActionListener dataSummaryActio client, TransportSearchAction.TYPE, searchRequestBuilder.request(), - ActionListener.wrap( - searchResponse -> dataSummaryActionListener.onResponse( - new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields) - ), - dataSummaryActionListener::onFailure + dataSummaryActionListener.delegateFailureAndWrap( + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) ) ); } From 01751c0298de44ce13f96a7ad8763c9e2c7f0873 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:13:34 +0100 Subject: [PATCH 06/35] [Docs] Add connectors links, cleanup connectors API docs (#104262) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [Docs] Add connectors links * 🧹 Cleanup abbreviations, add missing tech preview labels * Unify remaining tech preview, abbreviations, update CLI verbiage * Unify remaining tech preview, abbreviations --- .../apis/cancel-connector-sync-job-api.asciidoc | 2 ++ .../connector/apis/check-in-connector-api.asciidoc | 5 ++--- .../apis/check-in-connector-sync-job-api.asciidoc | 2 ++ .../connector/apis/connector-apis.asciidoc | 13 ++++++------- .../connector/apis/create-connector-api.asciidoc | 2 ++ .../apis/create-connector-sync-job-api.asciidoc | 3 +++ .../connector/apis/delete-connector-api.asciidoc | 5 ++--- .../apis/delete-connector-sync-job-api.asciidoc | 5 ++--- .../connector/apis/get-connector-api.asciidoc | 3 ++- .../apis/get-connector-sync-job-api.asciidoc | 3 ++- .../apis/list-connector-sync-jobs-api.asciidoc | 5 ++--- .../connector/apis/list-connectors-api.asciidoc | 5 ++--- .../apis/set-connector-sync-job-error-api.asciidoc | 2 ++ .../apis/set-connector-sync-job-stats-api.asciidoc | 2 ++ .../update-connector-configuration-api.asciidoc | 5 ++--- .../apis/update-connector-error-api.asciidoc | 5 ++--- .../apis/update-connector-filtering-api.asciidoc | 6 +++--- .../apis/update-connector-last-sync-api.asciidoc | 5 ++--- .../update-connector-name-description-api.asciidoc | 6 +++--- .../apis/update-connector-pipeline-api.asciidoc | 5 ++--- .../apis/update-connector-scheduling-api.asciidoc | 5 ++--- 21 files changed, 49 insertions(+), 45 deletions(-) diff --git a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc index 6123b7eb5511d..bd886bf923af8 100644 --- a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc @@ -4,6 +4,8 @@ Cancel connector sync job ++++ +preview::[] + Cancels a connector sync job. [[cancel-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/check-in-connector-api.asciidoc b/docs/reference/connector/apis/check-in-connector-api.asciidoc index c0c021f1304dc..9f88c595e3a67 100644 --- a/docs/reference/connector/apis/check-in-connector-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-api.asciidoc @@ -1,12 +1,11 @@ [[check-in-connector-api]] === Check in connector API - -preview::[] - ++++ Check in a connector ++++ +preview::[] + Updates the `last_seen` field of a connector with current timestamp. 
[[check-in-connector-api-request]] diff --git a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc index 04c8057e2c115..d3cc34bf025ed 100644 --- a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc @@ -4,6 +4,8 @@ Check in connector sync job ++++ +preview::[] + Checks in a connector sync job (updates `last_seen` to the current time). [[check-in-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index e127dc07446b5..eabb531551fe5 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -3,17 +3,16 @@ preview::[] -++++ -Connector APIs -++++ - ---- - -The connector and sync jobs API provides a convenient way to create and manage Elastic connectors and sync jobs in an internal index. +The connector and sync jobs API provides a convenient way to create and manage Elastic {enterprise-search-ref}/connectors.html[connectors^] and sync jobs in an internal index. This API provides an alternative to relying solely on {kib} UI for connector and sync job management. The API comes with a set of validations and assertions to ensure that the state representation in the internal index remains valid. +[TIP] +==== +We also have a command-line interface for Elastic connectors. Learn more in the https://github.com/elastic/connectors/blob/main/docs/CLI.md[elastic/connectors] repository. +==== + [discrete] [[elastic-connector-apis]] === Connector APIs diff --git a/docs/reference/connector/apis/create-connector-api.asciidoc b/docs/reference/connector/apis/create-connector-api.asciidoc index b62ca4ad070a4..2c1c4c9ba7bc4 100644 --- a/docs/reference/connector/apis/create-connector-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-api.asciidoc @@ -4,6 +4,8 @@ Create connector ++++ +preview::[] + Creates a connector. diff --git a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc index e8c2c364797c4..b036485285256 100644 --- a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc @@ -4,6 +4,9 @@ Create connector sync job ++++ +preview::[] + + Creates a connector sync job. [source, console] diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc index 6d3a120df785a..c7e9dcd94d2ad 100644 --- a/docs/reference/connector/apis/delete-connector-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-api.asciidoc @@ -1,12 +1,11 @@ [[delete-connector-api]] === Delete connector API - -preview::[] - ++++ Delete connector ++++ +preview::[] + Removes a connector and its associated data. This is a destructive action that is not recoverable. 
diff --git a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc index 8641794576bf1..32df172df758a 100644 --- a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc @@ -1,12 +1,11 @@ [[delete-connector-sync-job-api]] === Delete connector sync job API - -preview::[] - ++++ Delete connector sync job ++++ +preview::[] + Removes a connector sync job and its associated data. This is a destructive action that is not recoverable. diff --git a/docs/reference/connector/apis/get-connector-api.asciidoc b/docs/reference/connector/apis/get-connector-api.asciidoc index ab4a2758ce4f1..693a9fd767806 100644 --- a/docs/reference/connector/apis/get-connector-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-api.asciidoc @@ -1,10 +1,11 @@ [[get-connector-api]] === Get connector API -preview::[] ++++ Get connector ++++ +preview::[] + Retrieves the details about a connector. [[get-connector-api-request]] diff --git a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc index b33aec8c55e60..bfa82ea0d345c 100644 --- a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc +++ b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc @@ -1,10 +1,11 @@ [[get-connector-sync-job-api]] === Get connector sync job API -preview::[] ++++ Get connector sync job ++++ +preview::[] + Retrieves the details about a connector sync job. [[get-connector-sync-job-api-request]] diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 8b88f318f5304..a8851885b5051 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -1,13 +1,12 @@ [role="xpack"] [[list-connector-sync-jobs-api]] === List connector sync jobs API - -preview::[] - ++++ List connector sync jobs ++++ +preview::[] + Returns information about all stored connector sync jobs ordered by their creation date in ascending order. diff --git a/docs/reference/connector/apis/list-connectors-api.asciidoc b/docs/reference/connector/apis/list-connectors-api.asciidoc index 57d3cc47aeb7a..9b3fc50690243 100644 --- a/docs/reference/connector/apis/list-connectors-api.asciidoc +++ b/docs/reference/connector/apis/list-connectors-api.asciidoc @@ -1,13 +1,12 @@ [role="xpack"] [[list-connector-api]] === List connectors API - -preview::[] - ++++ List connectors ++++ +preview::[] + Returns information about all stored connectors. diff --git a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc index 935fcccc77fcf..a9dbf5ceb1eb2 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc @@ -4,6 +4,8 @@ Set connector sync job error ++++ +preview::[] + Sets a connector sync job error. 
[[set-connector-sync-job-error-api-request]] diff --git a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc index 0513155312bb4..a417bcf8b9e9f 100644 --- a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc +++ b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc @@ -4,6 +4,8 @@ Set connector sync job stats ++++ +preview::[] + Sets connector sync job stats. [[set-connector-sync-job-stats-api-request]] diff --git a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc index 6d6591a6f00bc..57484c14d0f90 100644 --- a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-configuration-api]] === Update connector configuration API - -preview::[] - ++++ Update connector configuration ++++ +preview::[] + Updates the `configuration` of a connector. diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc index 19bc15f0dc60a..dbed25f1bf8d5 100644 --- a/docs/reference/connector/apis/update-connector-error-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-error-api]] === Update connector error API - -preview::[] - ++++ Update connector error ++++ +preview::[] + Updates the `error` field of a connector. [[update-connector-error-api-request]] diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc index d4c7bb16a3304..3e81f0fda2ce7 100644 --- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc @@ -1,12 +1,12 @@ [[update-connector-filtering-api]] === Update connector filtering API - -preview::[] - ++++ Update connector filtering ++++ +preview::[] + + Updates the `filtering` configuration of a connector. Learn more about filtering in the {enterprise-search-ref}/sync-rules.html[sync rules] documentation. [[update-connector-filtering-api-request]] diff --git a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc index e9fffd22b21cd..6f41925e3676f 100644 --- a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-last-sync-api]] === Update connector last sync stats API - -preview::[] - ++++ Update connector last sync stats ++++ +preview::[] + Updates the fields related to the last sync of a connector. This action is used for analytics and monitoring. 
diff --git a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc index d45fb545e168b..c54dba8dd72b5 100644 --- a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc @@ -1,12 +1,12 @@ [[update-connector-name-description-api]] === Update connector name and description API - -preview::[] - ++++ Update connector name and description ++++ +preview::[] + + Updates the `name` and `description` fields of a connector. [[update-connector-name-description-api-request]] diff --git a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc index 6938506703da8..63872bf96aa55 100644 --- a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-pipeline-api]] === Update connector pipeline API - -preview::[] - ++++ Update connector pipeline ++++ +preview::[] + Updates the `pipeline` configuration of a connector. When you create a new connector, the configuration of an <> is populated with default settings. diff --git a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc index c47e6d4c0367b..7a2f33bcaeaa8 100644 --- a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc @@ -1,12 +1,11 @@ [[update-connector-scheduling-api]] === Update connector scheduling API - -preview::[] - ++++ Update connector scheduling ++++ +preview::[] + Updates the `scheduling` configuration of a connector. [[update-connector-scheduling-api-request]] From 1dfc72311669af21a1832eb3ca3912eae3ef367d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 12 Jan 2024 12:21:42 +0100 Subject: [PATCH 07/35] Speed up search cancellation checks by storing them in a list (#104304) We only ever add lambdas to this collection, so the contains check is pointless; we might as well use a list here, which iterates more efficiently.
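As a standalone illustration of the pattern this patch moves to (a minimal sketch with a hypothetical class name; the real code lives in ContextIndexSearcher.MutableQueryTimeout), each caller registers a fresh lambda instance, so equality-based duplicate detection can never fire and only adds hashing overhead:

    import java.util.ArrayList;
    import java.util.List;

    class QueryCancellations {
        private final List<Runnable> runnables = new ArrayList<>();

        Runnable add(Runnable action) {
            // Callers always pass fresh lambda instances, so the old Set-based
            // duplicate check could never trigger; keep the invariant as an assert.
            assert runnables.contains(action) == false : "Cancellation runnable already added";
            runnables.add(action);
            return action;
        }

        void checkCancelled() {
            // An ArrayList iterates a contiguous array, which is cheaper than
            // walking HashSet buckets on every cancellation check.
            for (Runnable runnable : runnables) {
                runnable.run();
            }
        }
    }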
relates #104273 --- .../search/internal/ContextIndexSearcher.java | 8 +++----- .../org/elasticsearch/search/SearchCancellationTests.java | 2 -- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index d834c12d0abe1..0263c6e83b17a 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -50,7 +50,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; -import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.PriorityQueue; @@ -525,13 +524,12 @@ public DirectoryReader getDirectoryReader() { private static class MutableQueryTimeout implements ExitableDirectoryReader.QueryCancellation { - private final Set runnables = new HashSet<>(); + private final List runnables = new ArrayList<>(); private Runnable add(Runnable action) { Objects.requireNonNull(action, "cancellation runnable should not be null"); - if (runnables.add(action) == false) { - throw new IllegalArgumentException("Cancellation runnable already added"); - } + assert runnables.contains(action) == false : "Cancellation runnable already added"; + runnables.add(action); return action; } diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 19f6400badcf5..79f16ab390dd2 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -92,8 +92,6 @@ public void testAddingCancellationActions() throws IOException { Runnable r = () -> {}; searcher.addQueryCancellation(r); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> searcher.addQueryCancellation(r)); - assertEquals("Cancellation runnable already added", iae.getMessage()); } public void testCancellableCollector() throws IOException { From ed8c98095a83d5ce8eec73119affa013a9ad3fe8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 12 Jan 2024 11:22:27 +0000 Subject: [PATCH 08/35] Reduce contention in `CancellableTask#ensureNotCancelled` (#104305) No need to acquire the mutex to read the volatile `isCancelled` field, we can check that first to avoid contention. Relates #104273 --- .../main/java/org/elasticsearch/tasks/CancellableTask.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java index 16eb7b7b2fb0f..8a0aa2033a30e 100644 --- a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java +++ b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java @@ -85,9 +85,11 @@ protected void onCancelled() {} /** * Throws a {@link TaskCancelledException} if this task has been cancelled, otherwise does nothing. 
*/ - public final synchronized void ensureNotCancelled() { + public final void ensureNotCancelled() { if (isCancelled()) { - throw getTaskCancelledException(); + synchronized (this) { + throw getTaskCancelledException(); + } } } From 8acfebeb0840e228fa35c3ce1566728a07b1b452 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:36:09 +0100 Subject: [PATCH 09/35] Cleanup `OperationModeUpdateTask` (#104265) The `hashCode` implementation called `super.hashCode`, which effectively (incorrectly) called `Object.hashCode`. The hash code didn't seem to be used anywhere, so removing it made more sense than fixing it. The `equals` also looked a bit off, as it included `priority` from the parent class but not `timeout`. Rather than implementing a proper equals method (in both `OperationModeUpdateTask` and `ClusterStateUpdateTask`, its parent), I decided to just remove the method, as I could only find one real use case, which was inside a test. --- docs/changelog/104265.yaml | 6 ++++++ .../core/ilm/OperationModeUpdateTask.java | 21 ------------------- .../xpack/ilm/IndexLifecycleServiceTests.java | 7 +++---- .../slm/SnapshotLifecycleServiceTests.java | 11 ++++++---- 4 files changed, 16 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/104265.yaml diff --git a/docs/changelog/104265.yaml b/docs/changelog/104265.yaml new file mode 100644 index 0000000000000..88c3d72ee81d0 --- /dev/null +++ b/docs/changelog/104265.yaml @@ -0,0 +1,6 @@ +pr: 104265 +summary: Remove `hashCode` and `equals` from `OperationModeUpdateTask` +area: ILM+SLM +type: bug +issues: + - 100871 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java index c36d73e8d12d6..1072e6ee4c899 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTask.java @@ -18,8 +18,6 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.core.Nullable; -import java.util.Objects; - import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentILMMode; import static org.elasticsearch.xpack.core.ilm.LifecycleOperationMetadata.currentSLMMode; @@ -157,23 +155,4 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) logger.info("SLM operation mode updated to {}", slmMode); } } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), ilmMode, slmMode); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj.getClass() != getClass()) { - return false; - } - OperationModeUpdateTask other = (OperationModeUpdateTask) obj; - return Objects.equals(priority(), other.priority()) - && Objects.equals(ilmMode, other.ilmMode) - && Objects.equals(slmMode, other.slmMode); - } } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index f47fc38206183..dd1e2bb9d8dd7 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -282,13 +282,12 @@ private void verifyCanStopWithStep(String stoppableStep) {
ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE); SetOnce changedOperationMode = new SetOnce<>(); doAnswer(invocationOnMock -> { + OperationModeUpdateTask task = (OperationModeUpdateTask) invocationOnMock.getArguments()[1]; + assertEquals(task.getILMOperationMode(), OperationMode.STOPPED); changedOperationMode.set(true); return null; }).when(clusterService) - .submitUnbatchedStateUpdateTask( - eq("ilm_operation_mode_update[stopped]"), - eq(OperationModeUpdateTask.ilmMode(OperationMode.STOPPED)) - ); + .submitUnbatchedStateUpdateTask(eq("ilm_operation_mode_update[stopped]"), any(OperationModeUpdateTask.class)); indexLifecycleService.applyClusterState(event); indexLifecycleService.triggerPolicies(currentState, true); assertTrue(changedOperationMode.get()); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java index 3541edfa20c93..9bbb08e89166e 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -454,13 +455,13 @@ public void testStoppedPriority() { ) ) ); - final SetOnce task = new SetOnce<>(); + final SetOnce task = new SetOnce<>(); ClusterService fakeService = new ClusterService(Settings.EMPTY, clusterSettings, threadPool, null) { @Override public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask updateTask) { logger.info("--> got task: [source: {}]: {}", source, updateTask); - if (updateTask instanceof OperationModeUpdateTask) { - task.set(updateTask); + if (updateTask instanceof OperationModeUpdateTask operationModeUpdateTask) { + task.set(operationModeUpdateTask); } } }; @@ -476,7 +477,9 @@ public void submitUnbatchedStateUpdateTask(String source, ClusterStateUpdateTask true ); service.clusterChanged(new ClusterChangedEvent("blah", state, ClusterState.EMPTY_STATE)); - assertThat(task.get(), equalTo(OperationModeUpdateTask.slmMode(OperationMode.STOPPED))); + assertEquals(task.get().priority(), Priority.IMMEDIATE); + assertNull(task.get().getILMOperationMode()); + assertEquals(task.get().getSLMOperationMode(), OperationMode.STOPPED); threadPool.shutdownNow(); } From 8a159b74bf9cca019316c1342d4d491b7079057e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 12 Jan 2024 13:39:04 +0100 Subject: [PATCH 10/35] Chunk BulkResponse on REST layer (#104310) If there's a large number of failures in these they can grow very large. Chunking these at least avoids the O(n) memory cost when sending these out over the REST layer. Even without failures large REST responses for these can grow to a size that exceeds what can be written to the channel right away and take very visible time to serialize (~7% of all coordinating node CPU time during ingest for the http_logs rally track!!). Better to smooth out the cost as write capacity becomes available. 
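To illustrate the chunking idea in isolation (toy types, not the actual ChunkedToXContent API): the response exposes an iterator of small render steps, and the REST layer pulls one chunk at a time as the channel can accept writes, so peak serialization memory is one chunk rather than the whole item list.

    import java.util.Iterator;
    import java.util.List;
    import java.util.function.Consumer;
    import java.util.stream.Stream;

    // Toy sketch of chunked serialization; comma handling between items is elided.
    final class ChunkedItemsResponse {
        private final List<String> items;

        ChunkedItemsResponse(List<String> items) {
            this.items = items;
        }

        Iterator<Consumer<StringBuilder>> toXContentChunked() {
            Stream<Consumer<StringBuilder>> header = Stream.of(out -> out.append("{\"errors\":false,\"items\":["));
            // One small chunk per item instead of one O(n) buffer built up front.
            Stream<Consumer<StringBuilder>> body = items.stream().map(item -> out -> out.append('"').append(item).append('"'));
            Stream<Consumer<StringBuilder>> footer = Stream.of(out -> out.append("]}"));
            return Stream.concat(header, Stream.concat(body, footer)).iterator();
        }
    }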
--- .../action/bulk/BulkResponse.java | 36 +++++++++---------- .../rest/action/document/RestBulkAction.java | 4 +-- .../ingest/RestSimulateIngestAction.java | 7 ++-- .../action/bulk/BulkResponseTests.java | 8 ++++- 4 files changed, 28 insertions(+), 27 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 0ce472520a4fd..2065a31ce5566 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -12,9 +12,9 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -31,7 +31,7 @@ * bulk requests. Each item holds the index/type/id is operated on, and if it failed or not (with the * failure message). */ -public class BulkResponse extends ActionResponse implements Iterable, ToXContentObject { +public class BulkResponse extends ActionResponse implements Iterable, ChunkedToXContentObject { private static final String ITEMS = "items"; private static final String ERRORS = "errors"; @@ -133,23 +133,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeZLong(ingestTookInMillis); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ERRORS, hasFailures()); - builder.field(TOOK, tookInMillis); - if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { - builder.field(INGEST_TOOK, ingestTookInMillis); - } - builder.startArray(ITEMS); - for (BulkItemResponse item : this) { - item.toXContent(builder, params); - } - builder.endArray(); - builder.endObject(); - return builder; - } - public static BulkResponse fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.nextToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); @@ -184,4 +167,17 @@ public static BulkResponse fromXContent(XContentParser parser) throws IOExceptio } return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook); } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + return Iterators.concat(Iterators.single((builder, p) -> { + builder.startObject(); + builder.field(ERRORS, hasFailures()); + builder.field(TOOK, tookInMillis); + if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { + builder.field(INGEST_TOOK, ingestTookInMillis); + } + return builder.startArray(ITEMS); + }), Iterators.forArray(responses), Iterators.single((builder, p) -> builder.endArray().endObject())); + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 7bfac46495b23..83a7728b82a4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -95,7 +95,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC request.getRestApiVersion() ); - return channel -> client.bulk(bulkRequest, new RestToXContentListener<>(channel)); + return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java index e0d9dd95206cf..2c9b84f78636a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java @@ -26,7 +26,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -140,7 +140,7 @@ static BytesReference convertToBulkRequestXContentBytes(Map sour * simulate-style xcontent. * Non-private for unit testing */ - static class SimulateIngestRestToXContentListener extends RestToXContentListener { + static class SimulateIngestRestToXContentListener extends RestBuilderListener { SimulateIngestRestToXContentListener(RestChannel channel) { super(channel); @@ -150,8 +150,7 @@ static class SimulateIngestRestToXContentListener extends RestToXContentListener public RestResponse buildResponse(BulkResponse response, XContentBuilder builder) throws Exception { assert response.isFragment() == false; toXContent(response, builder, channel.request()); - RestStatus restStatus = statusFunction.apply(response); - return new RestResponse(restStatus, builder); + return new RestResponse(RestStatus.OK, builder); } private static void toXContent(BulkResponse response, XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java index 5a1c7f1572e23..c1cd88e0864a4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.index.IndexResponseTests; import org.elasticsearch.action.update.UpdateResponseTests; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; @@ -68,7 +69,12 @@ public void testToAndFromXContent() throws IOException { } BulkResponse bulkResponse = new BulkResponse(bulkItems, took, ingestTook); - BytesReference originalBytes = toShuffledXContent(bulkResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); + BytesReference originalBytes = toShuffledXContent( + 
ChunkedToXContent.wrapAsToXContent(bulkResponse), + xContentType, + ToXContent.EMPTY_PARAMS, + humanReadable + ); BulkResponse parsedBulkResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { From c22ff0659c5f4c91cd4994db2e4a7ae961acadbd Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 12 Jan 2024 07:58:00 -0500 Subject: [PATCH 11/35] ESQL: Test all operators with breaker (#104267) This modifies the operator tests to *always* test with a breaker without the ability to opt out. Previously three operations opted out: 1. mv_expand never threw exceptions even though it should. It was using the block factory of the test blocks, which didn't have a breaker. I modified the test blocks to take a breaker so now it properly breaks. 2. reading values was throwing strange exceptions when I first wrote these tests and I didn't have time to get it to work. I don't 100% recall what those exceptions were, but they seem to be gone now. Good fairies? 3. The "project" operator doesn't allocate much of anything - it just drops or shifts blocks around. But the work I did to move the test blocks under the limit makes it so the test itself can throw. That's good enough for this. --- .../operator/CannedSourceOperator.java | 27 ++++++++++++------- .../compute/operator/LimitOperatorTests.java | 7 ----- .../operator/MvExpandOperatorTests.java | 7 ----- .../compute/operator/OperatorTestCase.java | 16 +++++------ .../operator/ProjectOperatorTests.java | 7 ----- 5 files changed, 23 insertions(+), 41 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index 01f51b32edb1d..4d5a6260ed02d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -79,18 +79,25 @@ public static Page mergePages(List<Page> pages) { * Make a deep copy of some pages. Useful so that when the originals are * released the copies are still live. */ - public static List<Page> deepCopyOf(List<Page> pages) { + public static List<Page> deepCopyOf(BlockFactory blockFactory, List<Page> pages) { List<Page> out = new ArrayList<>(pages.size()); - BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); - for (Page p : pages) { - Block[] blocks = new Block[p.getBlockCount()]; - for (int b = 0; b < blocks.length; b++) { - Block orig = p.getBlock(b); - Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory); - builder.copyFrom(orig, 0, p.getPositionCount()); - blocks[b] = builder.build(); + try { + for (Page p : pages) { + Block[] blocks = new Block[p.getBlockCount()]; + for (int b = 0; b < blocks.length; b++) { + Block orig = p.getBlock(b); + try (Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory)) { + builder.copyFrom(orig, 0, p.getPositionCount()); + blocks[b] = builder.build(); + } + } + out.add(new Page(blocks)); + } + } finally { + if (pages.size() != out.size()) { + // failed to copy all the pages, we're bubbling out an exception. So we have to close the copy.
+ Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(out.iterator(), p -> p::releaseBlocks))); } - out.add(new Page(blocks)); } return out; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index e366646ecd0f5..d2db9c7b48da6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -49,12 +48,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(outputPositionCount, equalTo(Math.min(100, inputPositionCount))); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeFalse("doesn't allocate, just filters", true); - return null; - } - public void testStatus() { BlockFactory blockFactory = driverContext().blockFactory(); LimitOperator op = simple().get(driverContext()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 165e5b80b9a58..02517e8fafe1a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; @@ -199,12 +198,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(resultIter2.hasNext(), equalTo(false)); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeFalse("doesn't throw in tests but probably should", true); - return ByteSizeValue.ofBytes(1); - } - public void testNoopStatus() { BlockFactory blockFactory = blockFactory(); MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1000)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 0890ba669f0a2..68a2bde0c2f6c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. 
@@ -97,16 +98,10 @@ public final void testSimpleCircuitBreaking() { DriverContext inputFactoryContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); try { - ByteSizeValue limit = BreakerTestUtil.findBreakerLimit( - memoryLimitForSimple, - l -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), l) - ); + ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(memoryLimitForSimple, l -> runWithLimit(simple, input, l)); ByteSizeValue testWithSize = ByteSizeValue.ofBytes(randomLongBetween(0, limit.getBytes())); logger.info("testing with {} against a limit of {}", testWithSize, limit); - Exception e = expectThrows( - CircuitBreakingException.class, - () -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), testWithSize) - ); + Exception e = expectThrows(CircuitBreakingException.class, () -> runWithLimit(simple, input, testWithSize)); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); } finally { Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); @@ -119,15 +114,16 @@ private void runWithLimit(Operator.OperatorFactory factory, List input, By CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); DriverContext driverContext = new DriverContext(bigArrays, blockFactory); + List localInput = CannedSourceOperator.deepCopyOf(blockFactory, input); boolean driverStarted = false; try { var operator = factory.get(driverContext); driverStarted = true; - drive(operator, input.iterator(), driverContext); + drive(operator, localInput.iterator(), driverContext); } finally { if (driverStarted == false) { // if drive hasn't even started then we need to release the input pages manually - Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(localInput.iterator(), p -> p::releaseBlocks))); } assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index 572657c7c8226..26b9b16d7b24e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; @@ -95,12 +94,6 @@ protected void assertSimpleOutput(List input, List results) { assertThat(total, equalTo(input.stream().mapToInt(Page::getPositionCount).sum())); } - @Override - protected ByteSizeValue enoughMemoryForSimple() { - assumeTrue("doesn't allocate", false); - return null; - } - public void testDescriptionOfMany() { ProjectOperator.ProjectOperatorFactory factory = new ProjectOperator.ProjectOperatorFactory( IntStream.range(0, 100).boxed().toList() From bb1a9874d0af66032de2e4cc09331c3942ec7a69 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 12 Jan 2024 08:08:32 
-0500 Subject: [PATCH 12/35] Disable ESQL async test It's failing due to some timing issues. I'll build a fix later today but let's get it out of everyone's way for now. Tracked by #104294 --- .../qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index 312175c92246a..be2bfcb8a2787 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.section.ApiCallSection; import org.elasticsearch.test.rest.yaml.section.DoSection; @@ -20,6 +21,7 @@ /** * Run the ESQL yaml tests async and then fetch the results with a long wait time. */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104294") public class EsqlClientYamlAsyncSubmitAndFetchIT extends AbstractEsqlClientYamlIT { public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); From ce94c10a358b67f86ecd5e0c48401f22d4df7c47 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Fri, 12 Jan 2024 14:37:28 +0100 Subject: [PATCH 13/35] Consider old version map in IndexBufferRAMBytesUsed (#104122) Separated from https://github.com/elastic/elasticsearch/pull/103979. Currently, `InternalEngine#getIndexBufferRAMBytesUsed` considers `versionMap.ramBytesUsedForRefresh()` as the refresh-related memory usage of the version map. However, `versionMap.ramBytesUsedForRefresh()` only considers the `current` map. This value (as part of `shard.getIndexBufferRAMBytesUsed()`) is used in the `IndexingMemoryController` as the current usage and then the amount of heap currently being freed (which for version map is the `old` map) is [subtracted from it](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java#L372). I think, we should consider both `old` and `current` in `getIndexBufferRAMBytesUsed`. 
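In sketch form, the accounting distinction this patch draws looks like the following (hypothetical field names; the real counters live in LiveVersionMap and are consumed by IndexingMemoryController):

    import java.util.concurrent.atomic.AtomicLong;

    class VersionMapRam {
        final AtomicLong currentBytes = new AtomicLong(); // filled by ongoing indexing
        final AtomicLong oldBytes = new AtomicLong();     // snapshot still being freed by an in-flight refresh

        // Memory "used by refresh": both maps occupy heap until the refresh
        // completes, so this is what should count toward index buffer usage.
        long ramBytesUsedForRefresh() {
            return currentBytes.get() + oldBytes.get();
        }

        // Memory another refresh could actually reclaim right now: only the current
        // map; the old map is already being freed and tombstones survive refreshes.
        long reclaimableRefreshRamBytes() {
            return currentBytes.get();
        }
    }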
--- docs/changelog/104122.yaml | 5 +++++ .../elasticsearch/index/engine/Engine.java | 3 ++- .../index/engine/InternalEngine.java | 4 ++-- .../index/engine/LiveVersionMap.java | 12 +++++++++-- .../index/engine/LiveVersionMapTests.java | 21 +++++++++++++++++++ 5 files changed, 40 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/104122.yaml diff --git a/docs/changelog/104122.yaml b/docs/changelog/104122.yaml new file mode 100644 index 0000000000000..a88d7499bd44e --- /dev/null +++ b/docs/changelog/104122.yaml @@ -0,0 +1,5 @@ +pr: 104122 +summary: Consider currently refreshing data in the memory usage of refresh +area: Engine +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 9b9cf8ad35c04..3849095a94e6e 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1007,7 +1007,8 @@ protected void writerSegmentStats(SegmentsStats stats) { stats.addIndexWriterMemoryInBytes(0); } - /** How much heap is used that would be freed by a refresh. Note that this may throw {@link AlreadyClosedException}. */ + /** How much heap is used that would be freed by a refresh. This includes both the current memory being freed and any remaining + * memory usage that could be freed, e.g., by refreshing. Note that this may throw {@link AlreadyClosedException}. */ public abstract long getIndexBufferRAMBytesUsed(); final Segment[] getSegmentInfo(SegmentInfos lastCommittedSegmentInfos) { diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 8affee4330074..65834a8c011f2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2093,14 +2093,14 @@ protected final RefreshResult refresh(String source, SearcherScope scope, boolea @Override public void writeIndexingBuffer() throws IOException { - final long versionMapBytesUsed = versionMap.ramBytesUsedForRefresh(); + final long reclaimableVersionMapBytes = versionMap.reclaimableRefreshRamBytes(); // Only count bytes that are not already being written to disk. Note: this number may be negative at times if these two metrics get // updated concurrently. It's fine as it's only being used as a heuristic to decide on a full refresh vs. writing a single segment. // TODO: it might be more relevant to use the RAM usage of the largest DWPT as opposed to the overall RAM usage? Can we get this // exposed in Lucene? final long indexWriterBytesUsed = indexWriter.ramBytesUsed() - indexWriter.getFlushingBytes(); - if (versionMapBytesUsed >= indexWriterBytesUsed) { + if (reclaimableVersionMapBytes >= indexWriterBytesUsed) { // This method expects to reclaim memory quickly, so if the version map is using more memory than the IndexWriter buffer then we // do a refresh, which is the only way to reclaim memory from the version map. IndexWriter#flushNextBuffer has similar logic: if // pending deletes occupy more than half of RAMBufferSizeMB then deletes are applied too. 
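The heuristic in the hunk above boils down to a single comparison; roughly, as a hypothetical standalone form of the decision in InternalEngine#writeIndexingBuffer (not the real method):

    class WriteIndexingBufferPolicy {
        enum Action { REFRESH, WRITE_LARGEST_PENDING_SEGMENT }

        static Action choose(long reclaimableVersionMapBytes, long indexWriterBytesUsed) {
            // A refresh is the only way to reclaim version-map memory, so prefer it
            // whenever the version map dominates the IndexWriter's indexing buffer;
            // otherwise writing a single segment frees IndexWriter memory more cheaply.
            return reclaimableVersionMapBytes >= indexWriterBytesUsed
                ? Action.REFRESH
                : Action.WRITE_LARGEST_PENDING_SEGMENT;
        }
    }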
diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 1cee2a90ec3f1..7cc1b92b43c43 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -476,10 +476,18 @@ public long ramBytesUsed() { } /** - * Returns how much RAM would be freed up by refreshing. This is the RAM usage of the current version map. It doesn't include tombstones - * since they don't get cleared on refresh, nor the old version map that is being reclaimed. + * Returns how much RAM is used by refresh. This is the RAM usage of the current and old version maps. */ long ramBytesUsedForRefresh() { + return maps.ramBytesUsed(); + } + + /** + * Returns how much RAM could be reclaimed from the version map. This is the RAM usage of the current version map, and could be + * reclaimed by refreshing. It doesn't include tombstones since they don't get cleared on refresh, nor the old version map that + * is being reclaimed. + */ + long reclaimableRefreshRamBytes() { return maps.current.ramBytesUsed.get(); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index 5ca7aadc35fa7..8d357413b09cd 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -495,4 +495,25 @@ public void testVersionLookupRamBytesUsed() { .sum(); assertEquals(actualRamBytesUsed, vl.ramBytesUsed()); } + + public void testVersionMapReclaimableRamBytes() throws IOException { + LiveVersionMap map = new LiveVersionMap(); + assertEquals(map.ramBytesUsedForRefresh(), 0L); + assertEquals(map.reclaimableRefreshRamBytes(), 0L); + IntStream.range(0, randomIntBetween(10, 100)).forEach(i -> { + BytesRefBuilder uid = new BytesRefBuilder(); + uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20)); + try (Releasable r = map.acquireLock(uid.toBytesRef())) { + map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); + } + }); + assertThat(map.reclaimableRefreshRamBytes(), greaterThan(0L)); + assertEquals(map.reclaimableRefreshRamBytes(), map.ramBytesUsedForRefresh()); + map.beforeRefresh(); + assertEquals(map.reclaimableRefreshRamBytes(), 0L); + assertThat(map.ramBytesUsedForRefresh(), greaterThan(0L)); + map.afterRefresh(randomBoolean()); + assertEquals(map.reclaimableRefreshRamBytes(), 0L); + assertEquals(map.ramBytesUsedForRefresh(), 0L); + } } From 063fc26a20beadddad39d0d217ea26e0139a5052 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 12 Jan 2024 14:56:32 +0100 Subject: [PATCH 14/35] Temporarily tolerate tracing.apm.agent.global_labels.XYZ settings (#104315) --- .../telemetry/apm/internal/APMAgentSettings.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 0ee13dae70740..12e81e7ae78e1 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -227,6 +227,10 @@ public void setAgentSetting(String key, String value) { final String key = parts[parts.length - 1]; return new 
Setting<>(qualifiedKey, "", (value) -> { if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(key) == false) { + // TODO figure out why those settings are kept, these should be reformatted / removed by now + if (key.startsWith("global_labels.")) { + return value; + } throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown."); } return value; From 5ca63c5b501a8c9d16eda6a3ff57c9ed0597cb3a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 12 Jan 2024 09:03:07 -0500 Subject: [PATCH 15/35] ESQL: Remove SearchContext from Operators (#104290) This removes the *giant* `SearchContext` class from our `Operator`s. It's just *so* **so** ***so*** big. It's hard to test with. You can do it. There's a lot of crazy mocking. Lots of reading. The replacement is two interfaces, one in the compute engine, called `ShardContext`. It contains the methods that are required to power all of the `LuceneSourceOperator`s. It has the `IndexSearcher`, can build sorts, and has a few identifiers. The other interface is also called `ShardContext`, but this one lives in esql itself and it has all of the things needed to power the `EsPhysicalOperationProviders` - mostly stuff to configure the value fetching operator and build queries. There exists a "production" implementation of these interfaces in `EsPhysicalOperationProviders` and a test implementation in `LuceneSourceOperatorTests`. It's super easy to plug in non-production implementations that don't drag in 45% of all of Elasticsearch. --- .../compute/lucene/BlockReaderFactories.java | 74 ------ .../compute/lucene/LuceneCountOperator.java | 7 +- .../compute/lucene/LuceneOperator.java | 27 +-- .../compute/lucene/LuceneSlice.java | 4 +- .../compute/lucene/LuceneSliceQueue.java | 16 +- .../compute/lucene/LuceneSourceOperator.java | 9 +- .../lucene/LuceneTopNSourceOperator.java | 27 +-- .../compute/lucene/ShardContext.java | 42 ++++ .../elasticsearch/compute/OperatorTests.java | 6 +- .../lucene/LuceneCountOperatorTests.java | 5 +- .../lucene/LuceneSourceOperatorTests.java | 92 ++++---- .../lucene/LuceneTopNSourceOperatorTests.java | 43 ++-- .../ValuesSourceReaderOperatorTests.java | 10 +- .../esql/enrich/EnrichLookupService.java | 8 +- .../planner/EsPhysicalOperationProviders.java | 212 +++++++++++++----- .../esql/planner/LocalExecutionPlanner.java | 16 +- .../xpack/esql/plugin/ComputeService.java | 13 +- .../planner/LocalExecutionPlannerTests.java | 16 +- 18 files changed, 332 insertions(+), 295 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java deleted file mode 100644 index 95b3ee9c10ff0..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.compute.lucene; - -import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.index.mapper.BlockLoader; -import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.search.lookup.SearchLookup; - -import java.util.Set; - -/** - * Resolves *how* ESQL loads field values. - */ -public final class BlockReaderFactories { - private BlockReaderFactories() {} - - /** - * Resolves *how* ESQL loads field values. - * @param ctx a search context for the index we're loading field from - * @param fieldName the name of the field to load - * @param asUnsupportedSource should the field be loaded as "unsupported"? - * These will always have {@code null} values - */ - public static BlockLoader loader(SearchExecutionContext ctx, String fieldName, boolean asUnsupportedSource) { - if (asUnsupportedSource) { - return BlockLoader.CONSTANT_NULLS; - } - MappedFieldType fieldType = ctx.getFieldType(fieldName); - if (fieldType == null) { - // the field does not exist in this context - return BlockLoader.CONSTANT_NULLS; - } - BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() { - @Override - public String indexName() { - return ctx.getFullyQualifiedIndex().getName(); - } - - @Override - public SearchLookup lookup() { - return ctx.lookup(); - } - - @Override - public Set sourcePaths(String name) { - return ctx.sourcePath(name); - } - - @Override - public String parentField(String field) { - return ctx.parentPath(field); - } - - @Override - public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { - return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME); - } - }); - if (loader == null) { - HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName); - return BlockLoader.CONSTANT_NULLS; - } - - return loader; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 4ed32d6552497..4dda5c16295fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.io.UncheckedIOException; @@ -49,8 +48,8 @@ public static class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int limit @@ -58,7 +57,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = 
Math.min(sliceQueue.totalSlices(), taskConcurrency); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 21b2a4cfaeb0b..1eeedd06d058d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -26,7 +26,6 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -90,11 +89,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { continue; } processedSlices++; - processedShards.add( - currentSlice.searchContext().getSearchExecutionContext().getFullyQualifiedIndex().getName() - + ":" - + currentSlice.searchContext().getSearchExecutionContext().getShardId() - ); + processedShards.add(currentSlice.shardContext().shardIdentifier()); } final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++); logger.trace("Starting {}", partialLeaf); @@ -102,7 +97,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { if (currentScorer == null || currentScorer.leafReaderContext() != leaf) { final Weight weight = currentSlice.weight().get(); processedQueries.add(weight.getQuery()); - currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf); + currentScorer = new LuceneScorer(currentSlice.shardContext(), weight, leaf); } assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + partialLeaf.maxDoc(); currentScorer.maxPosition = partialLeaf.maxDoc(); @@ -118,8 +113,7 @@ LuceneScorer getCurrentOrLoadNextScorer() { * Wraps a {@link BulkScorer} with shard information */ static final class LuceneScorer { - private final int shardIndex; - private final SearchContext searchContext; + private final ShardContext shardContext; private final Weight weight; private final LeafReaderContext leafReaderContext; @@ -128,9 +122,8 @@ static final class LuceneScorer { private int maxPosition; private Thread executingThread; - LuceneScorer(int shardIndex, SearchContext searchContext, Weight weight, LeafReaderContext leafReaderContext) { - this.shardIndex = shardIndex; - this.searchContext = searchContext; + LuceneScorer(ShardContext shardContext, Weight weight, LeafReaderContext leafReaderContext) { + this.shardContext = shardContext; this.weight = weight; this.leafReaderContext = leafReaderContext; reinitialize(); @@ -165,12 +158,8 @@ void markAsDone() { position = DocIdSetIterator.NO_MORE_DOCS; } - int shardIndex() { - return shardIndex; - } - - SearchContext searchContext() { - return searchContext; + ShardContext shardContext() { + return shardContext; } Weight weight() { @@ -377,7 +366,7 @@ public String toString() { } } - static Function weightFunction(Function queryFunction, ScoreMode scoreMode) { + static Function weightFunction(Function queryFunction, ScoreMode scoreMode) { return ctx -> { final var query = queryFunction.apply(ctx); final var searcher = ctx.searcher(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java index c3fe03ae88bb3..716df6844e79f 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSlice.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.search.Weight; -import org.elasticsearch.search.internal.SearchContext; import java.util.List; import java.util.function.Supplier; @@ -16,8 +15,7 @@ /** * Holds a list of multiple partial Lucene segments */ -public record LuceneSlice(int shardIndex, SearchContext searchContext, List leaves, Supplier weight) { - +public record LuceneSlice(ShardContext shardContext, List leaves, Supplier weight) { int numLeaves() { return leaves.size(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index faf3d6437282a..d0329174f2839 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Weight; import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.internal.SearchContext; import java.util.ArrayList; import java.util.Arrays; @@ -48,24 +47,23 @@ public int totalSlices() { } public static LuceneSliceQueue create( - List searchContexts, - Function weightFunction, + List contexts, + Function weightFunction, DataPartitioning dataPartitioning, int taskConcurrency ) { final List slices = new ArrayList<>(); - for (int shardIndex = 0; shardIndex < searchContexts.size(); shardIndex++) { - final SearchContext searchContext = searchContexts.get(shardIndex); - final List leafContexts = searchContext.searcher().getLeafContexts(); + for (ShardContext ctx : contexts) { + final List leafContexts = ctx.searcher().getLeafContexts(); List> groups = switch (dataPartitioning) { case SHARD -> Collections.singletonList(leafContexts.stream().map(PartialLeafReaderContext::new).toList()); case SEGMENT -> segmentSlices(leafContexts); - case DOC -> docSlices(searchContext.searcher().getIndexReader(), taskConcurrency); + case DOC -> docSlices(ctx.searcher().getIndexReader(), taskConcurrency); }; final Weight[] cachedWeight = new Weight[1]; final Supplier weight = () -> { if (cachedWeight[0] == null) { - cachedWeight[0] = weightFunction.apply(searchContext); + cachedWeight[0] = weightFunction.apply(ctx); } return cachedWeight[0]; }; @@ -73,7 +71,7 @@ public static LuceneSliceQueue create( weight.get(); // eagerly build Weight once } for (List group : groups) { - slices.add(new LuceneSlice(shardIndex, searchContext, group, weight)); + slices.add(new LuceneSlice(ctx, group, weight)); } } return new LuceneSliceQueue(slices); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index b636e4aba8a5e..9d6e3f46d0e1e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import 
org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.io.UncheckedIOException; @@ -46,8 +45,8 @@ public static class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int maxPageSize, @@ -57,7 +56,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @@ -149,7 +148,7 @@ public Page getOutput() { IntBlock leaf = null; IntVector docs = null; try { - shard = blockFactory.newConstantIntBlockWith(scorer.shardIndex(), currentPagePos); + shard = blockFactory.newConstantIntBlockWith(scorer.shardContext().index(), currentPagePos); leaf = blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos); docs = docsBuilder.build(); docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 7f08c8ca66821..8cb9173adc197 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -24,7 +24,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; @@ -49,8 +48,8 @@ public static final class Factory implements LuceneOperator.Factory { private final LuceneSliceQueue sliceQueue; public Factory( - List searchContexts, - Function queryFunction, + List contexts, + Function queryFunction, DataPartitioning dataPartitioning, int taskConcurrency, int maxPageSize, @@ -62,7 +61,7 @@ public Factory( this.limit = limit; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS); - this.sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, dataPartitioning, taskConcurrency); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @@ -156,9 +155,9 @@ private Page collect() { return emit(true); } try { - if (perShardCollector == null || perShardCollector.shardIndex != scorer.shardIndex()) { + if (perShardCollector == null || perShardCollector.shardContext.index() != scorer.shardContext().index()) { // TODO: share the bottom between shardCollectors - perShardCollector = new PerShardCollector(scorer.shardIndex(), scorer.searchContext(), sorts, limit); + perShardCollector = new PerShardCollector(scorer.shardContext(), sorts, limit); } var leafCollector = 
perShardCollector.getLeafCollector(scorer.leafReaderContext()); scorer.scoreNextRange(leafCollector, scorer.leafReaderContext().reader().getLiveDocs(), maxPageSize); @@ -170,7 +169,7 @@ private Page collect() { } if (scorer.isDone()) { var nextScorer = getCurrentOrLoadNextScorer(); - if (nextScorer == null || nextScorer.shardIndex() != scorer.shardIndex()) { + if (nextScorer == null || nextScorer.shardContext().index() != scorer.shardContext().index()) { return emit(true); } } @@ -205,7 +204,7 @@ private Page emit(boolean startEmitting) { ) { int start = offset; offset += size; - List leafContexts = perShardCollector.searchContext.searcher().getLeafContexts(); + List leafContexts = perShardCollector.shardContext.searcher().getLeafContexts(); for (int i = start; i < offset; i++) { int doc = scoreDocs[i].doc; int segment = ReaderUtil.subIndex(doc, leafContexts); @@ -213,7 +212,7 @@ private Page emit(boolean startEmitting) { currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment } - shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardIndex, size); + shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardContext.index(), size); segments = currentSegmentBuilder.build(); docs = currentDocsBuilder.build(); page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock()); @@ -233,17 +232,15 @@ protected void describe(StringBuilder sb) { } static final class PerShardCollector { - private final int shardIndex; - private final SearchContext searchContext; + private final ShardContext shardContext; private final TopFieldCollector topFieldCollector; private int leafIndex; private LeafCollector leafCollector; private Thread currentThread; - PerShardCollector(int shardIndex, SearchContext searchContext, List> sorts, int limit) throws IOException { - this.shardIndex = shardIndex; - this.searchContext = searchContext; - Optional sortAndFormats = SortBuilder.buildSort(sorts, searchContext.getSearchExecutionContext()); + PerShardCollector(ShardContext shardContext, List> sorts, int limit) throws IOException { + this.shardContext = shardContext; + Optional sortAndFormats = shardContext.buildSort(sorts); if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java new file mode 100644 index 0000000000000..5bf6ac8532f48 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ShardContext.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.search.IndexSearcher; +import org.elasticsearch.search.sort.SortAndFormats; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; + +/** + * Context of each shard we're operating against. + */ +public interface ShardContext { + /** + * The index of this shard in the list of shards being processed. + */ + int index(); + + /** + * Get {@link IndexSearcher} holding the actual data. 
+ */ + IndexSearcher searcher(); + + /** + * Build a "sort" configuration from an Elasticsearch style builder. + */ + Optional buildSort(List> sorts) throws IOException; + + /** + * A "name" for the shard that you can look up against other APIs like + * {@code _cat/shards}. + */ + String shardIdentifier(); +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 8415b3883ad3a..45a019328940c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -47,6 +47,8 @@ import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperator; +import org.elasticsearch.compute.lucene.LuceneSourceOperatorTests; +import org.elasticsearch.compute.lucene.ShardContext; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.Driver; @@ -65,7 +67,6 @@ import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -79,7 +80,6 @@ import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL; import static org.elasticsearch.compute.aggregation.AggregatorMode.INITIAL; -import static org.elasticsearch.compute.lucene.LuceneSourceOperatorTests.mockSearchContext; import static org.elasticsearch.compute.operator.OperatorTestCase.randomPageSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; @@ -345,7 +345,7 @@ public static void assertDriverContext(DriverContext driverContext) { } static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) { - final SearchContext searchContext = mockSearchContext(reader, 0); + final ShardContext searchContext = new LuceneSourceOperatorTests.MockShardContext(reader, 0); return new LuceneSourceOperator.Factory( List.of(searchContext), ctx -> query, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 8d401c2099b85..0c41cfc704f56 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.search.internal.SearchContext; import org.junit.After; import java.io.IOException; @@ -38,7 +37,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.mockito.Mockito.when; public class LuceneCountOperatorTests extends AnyOperatorTestCase { private Directory directory = newDirectory(); @@ -82,8 +80,7 @@ private LuceneCountOperator.Factory 
simple(DataPartitioning dataPartitioning, in throw new RuntimeException(e); } - SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); + ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); final Query query; if (enableShortcut && randomBoolean()) { query = new MatchAllDocsQuery(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 19e16144e11c5..a4c6622344bea 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -27,26 +27,19 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.Index; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortAndFormats; +import org.elasticsearch.search.sort.SortBuilder; import org.junit.After; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.function.Function; import static org.hamcrest.Matchers.both; @@ -55,10 +48,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class LuceneSourceOperatorTests extends AnyOperatorTestCase { private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); @@ -97,24 +86,8 @@ private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, i throw new RuntimeException(e); } - SearchContext ctx = mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { - String name = inv.getArgument(0); - return switch (name) { - case "s" -> S_FIELD; - default -> throw new IllegalArgumentException("don't support [" + name + "]"); - }; - }); - when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { - MappedFieldType ft = inv.getArgument(0); - IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); - // This breaker is for fielddata from text fields. 
We don't test it, so it's fine not to use a real breaker here. - return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - }); - when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); - when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); - Function queryFunction = c -> new MatchAllDocsQuery(); + ShardContext ctx = new MockShardContext(reader, 0); + Function queryFunction = c -> new MatchAllDocsQuery(); int maxPageSize = between(10, Math.max(10, numDocs)); return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit); } @@ -206,24 +179,43 @@ private void testSimple(DriverContext ctx, int size, int limit) { * Creates a mock search context with the given index reader. * The returned mock search context can be used to test with {@link LuceneOperator}. */ - public static SearchContext mockSearchContext(IndexReader reader, int shardId) { - try { - ContextIndexSearcher searcher = new ContextIndexSearcher( - reader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - TrivialQueryCachingPolicy.NEVER, - true - ); - SearchContext searchContext = mock(SearchContext.class); - when(searchContext.searcher()).thenReturn(searcher); - SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); - when(searchContext.getSearchExecutionContext()).thenReturn(searchExecutionContext); - when(searchExecutionContext.getFullyQualifiedIndex()).thenReturn(new Index("test", "uid")); - when(searchExecutionContext.getShardId()).thenReturn(shardId); - return searchContext; - } catch (IOException e) { - throw new UncheckedIOException(e); + public static class MockShardContext implements ShardContext { + private final int index; + private final ContextIndexSearcher searcher; + + public MockShardContext(IndexReader reader, int index) { + this.index = index; + try { + this.searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + TrivialQueryCachingPolicy.NEVER, + true + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } + + @Override + public int index() { + return index; + } + + @Override + public IndexSearcher searcher() { + return searcher; + } + + @Override + public Optional buildSort(List> sorts) { + return Optional.empty(); + } + + @Override + public String shardIdentifier() { + return "test"; } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 5776c45274ad1..57f3dd5412ca1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -13,6 +13,10 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSelector; +import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import
org.elasticsearch.common.breaker.CircuitBreakingException; @@ -26,30 +30,23 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import org.junit.After; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.when; public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); @@ -88,24 +85,14 @@ private LuceneTopNSourceOperator.Factory simple(DataPartitioning dataPartitionin throw new RuntimeException(e); } - SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader, 0); - when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { - String name = inv.getArgument(0); - return switch (name) { - case "s" -> S_FIELD; - default -> throw new IllegalArgumentException("don't support [" + name + "]"); - }; - }); - when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { - MappedFieldType ft = inv.getArgument(0); - IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); - // This breaker is used for fielddata but we're not testing that. 
- return builder.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - }); - when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); - when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); - Function queryFunction = c -> new MatchAllDocsQuery(); + ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0) { + @Override + public Optional buildSort(List> sorts) { + SortField field = new SortedNumericSortField("s", SortField.Type.LONG, false, SortedNumericSelector.Type.MIN); + return Optional.of(new SortAndFormats(new Sort(field), new DocValueFormat[] { null })); + } + }; + Function queryFunction = c -> new MatchAllDocsQuery(); int taskConcurrency = 0; int maxPageSize = between(10, Math.max(10, size)); List> sorts = List.of(new FieldSortBuilder("s")); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index ada0582a2fad8..330a7293a9a67 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -65,7 +65,6 @@ import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.TsidExtractingIdFieldMapper; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -86,7 +85,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.compute.lucene.LuceneSourceOperatorTests.mockSearchContext; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; @@ -164,7 +162,7 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv throw new RuntimeException(e); } var luceneFactory = new LuceneSourceOperator.Factory( - List.of(mockSearchContext(reader, 0)), + List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)), ctx -> new MatchAllDocsQuery(), DataPartitioning.SHARD, randomIntBetween(1, 10), @@ -1268,7 +1266,7 @@ public void testWithNulls() throws IOException { DriverContext driverContext = driverContext(); var luceneFactory = new LuceneSourceOperator.Factory( - List.of(mockSearchContext(reader, 0)), + List.of(new LuceneSourceOperatorTests.MockShardContext(reader, 0)), ctx -> new MatchAllDocsQuery(), randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), @@ -1483,10 +1481,10 @@ public void testManyShards() throws IOException { closeMe[d * 2 + 1] = dirs[d] = newDirectory(); closeMe[d * 2] = readers[d] = initIndex(dirs[d], size, between(10, size * 2)); } - List contexts = new ArrayList<>(); + List contexts = new ArrayList<>(); List readerShardContexts = new ArrayList<>(); for (int s = 0; s < shardCount; s++) { - contexts.add(mockSearchContext(readers[s], s)); + contexts.add(new LuceneSourceOperatorTests.MockShardContext(readers[s], s)); readerShardContexts.add(new ValuesSourceReaderOperator.ShardContext(readers[s], () -> 
SourceLoader.FROM_STORED_SOURCE)); } var luceneFactory = new LuceneSourceOperator.Factory( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index a533c373ad2ca..789c15ee156ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -31,7 +31,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -74,6 +73,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -274,8 +274,12 @@ private void doLookup( NamedExpression extractField = extractFields.get(i); final ElementType elementType = PlannerUtils.toElementType(extractField.dataType()); mergingTypes[i] = elementType; - BlockLoader loader = BlockReaderFactories.loader( + EsPhysicalOperationProviders.ShardContext ctx = new EsPhysicalOperationProviders.DefaultShardContext( + 0, searchContext.getSearchExecutionContext(), + searchContext.request().getAliasFilter() + ); + BlockLoader loader = ctx.blockLoader( extractField instanceof Alias a ? 
((NamedExpression) a.child()).name() : extractField.name(), EsqlDataTypes.isUnsupported(extractField.dataType()) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index b324cf7c4056a..43d02a00c4db4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -9,24 +9,32 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.lucene.BlockReaderFactories; +import org.elasticsearch.compute.lucene.LuceneCountOperator; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -37,10 +45,14 @@ import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; +import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; +import java.util.Set; import java.util.function.Function; import java.util.function.IntFunction; @@ -48,15 +60,30 @@ import static org.elasticsearch.compute.lucene.LuceneSourceOperator.NO_LIMIT; public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProviders { + /** + * Context of each shard we're operating against. + */ + public interface ShardContext extends org.elasticsearch.compute.lucene.ShardContext { + /** + * Build something to load source {@code _source}. + */ + SourceLoader newSourceLoader(); - private final List searchContexts; + /** + * Convert a {@link QueryBuilder} into a real {@link Query lucene query}. 
+ */ + Query toQuery(QueryBuilder queryBuilder); - public EsPhysicalOperationProviders(List searchContexts) { - this.searchContexts = searchContexts; + /** + * Returns something to load values from this field into a {@link Block}. + */ + BlockLoader blockLoader(String name, boolean asUnsupportedSource); } - public List searchContexts() { - return searchContexts; + private final List shardContexts; + + public EsPhysicalOperationProviders(List shardContexts) { + this.shardContexts = shardContexts; } @Override @@ -66,7 +93,7 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi // to GeoPointFieldMapper.blockLoader Layout.Builder layout = source.layout.builder(); var sourceAttr = fieldExtractExec.sourceAttribute(); - List readers = searchContexts.stream() + List readers = shardContexts.stream() .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader)) .toList(); List fields = new ArrayList<>(); @@ -77,46 +104,19 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi ElementType elementType = PlannerUtils.toElementType(dataType); String fieldName = attr.name(); boolean isSupported = EsqlDataTypes.isUnsupported(dataType); - IntFunction loader = s -> BlockReaderFactories.loader( - searchContexts.get(s).getSearchExecutionContext(), - fieldName, - isSupported - ); + IntFunction loader = s -> shardContexts.get(s).blockLoader(fieldName, isSupported); fields.add(new ValuesSourceReaderOperator.FieldInfo(fieldName, elementType, loader)); } return source.with(new ValuesSourceReaderOperator.Factory(fields, readers, docChannel), layout.build()); } - public static Function querySupplier(QueryBuilder queryBuilder) { - final QueryBuilder qb = queryBuilder == null ? QueryBuilders.matchAllQuery() : queryBuilder; - - return searchContext -> { - SearchExecutionContext ctx = searchContext.getSearchExecutionContext(); - Query query = ctx.toQuery(qb).query(); - NestedLookup nestedLookup = ctx.nestedLookup(); - if (nestedLookup != NestedLookup.EMPTY) { - NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); - if (nestedHelper.mightMatchNestedDocs(query)) { - // filter out nested documents - query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) - .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) - .build(); - } - } - AliasFilter aliasFilter = searchContext.request().getAliasFilter(); - if (aliasFilter != AliasFilter.EMPTY) { - Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query(); - query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) - .add(filterQuery, BooleanClause.Occur.FILTER) - .build(); - } - return query; - }; + public Function querySupplier(QueryBuilder builder) { + QueryBuilder qb = builder == null ? 
QueryBuilders.matchAllQuery() : builder; + return ctx -> shardContexts.get(ctx.index()).toQuery(qb); } @Override public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) { - Function querySupplier = querySupplier(esQueryExec.query()); final LuceneOperator.Factory luceneFactory; List sorts = esQueryExec.sorts(); @@ -130,8 +130,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, fieldSorts.add(sort.fieldSortBuilder()); } luceneFactory = new LuceneTopNSourceOperator.Factory( - searchContexts, - querySupplier, + shardContexts, + querySupplier(esQueryExec.query()), context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), @@ -140,8 +140,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, ); } else { luceneFactory = new LuceneSourceOperator.Factory( - searchContexts, - querySupplier, + shardContexts, + querySupplier(esQueryExec.query()), context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), @@ -155,6 +155,19 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, return PhysicalOperation.fromSource(luceneFactory, layout.build()); } + /** + * Build a {@link SourceOperator.SourceOperatorFactory} that counts documents in the search index. + */ + public LuceneCountOperator.Factory countSource(LocalExecutionPlannerContext context, QueryBuilder queryBuilder, Expression limit) { + return new LuceneCountOperator.Factory( + shardContexts, + querySupplier(queryBuilder), + context.queryPragmas().dataPartitioning(), + context.queryPragmas().taskConcurrency(), + limit == null ? NO_LIMIT : (Integer) limit.fold() + ); + } + @Override public final Operator.OperatorFactory ordinalGroupingOperatorFactory( LocalExecutionPlanner.PhysicalOperation source, @@ -166,19 +179,15 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory( ) { var sourceAttribute = FieldExtractExec.extractSourceAttributesFrom(aggregateExec.child()); int docChannel = source.layout.get(sourceAttribute.id()).channel(); - List shardContexts = searchContexts.stream() + List vsShardContexts = shardContexts.stream() .map(s -> new ValuesSourceReaderOperator.ShardContext(s.searcher().getIndexReader(), s::newSourceLoader)) .toList(); // The grouping-by values are ready, let's group on them directly. // Costin: why are they ready and not already exposed in the layout? 
boolean isUnsupported = EsqlDataTypes.isUnsupported(attrSource.dataType()); return new OrdinalsGroupingOperator.OrdinalsGroupingOperatorFactory( - shardIdx -> BlockReaderFactories.loader( - searchContexts.get(shardIdx).getSearchExecutionContext(), - attrSource.name(), - isUnsupported - ), - shardContexts, + shardIdx -> shardContexts.get(shardIdx).blockLoader(attrSource.name(), isUnsupported), + vsShardContexts, groupElementType, docChannel, attrSource.name(), @@ -186,4 +195,107 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory( context.pageSize(aggregateExec.estimatedRowSize()) ); } + + public static class DefaultShardContext implements ShardContext { + private final int index; + private final SearchExecutionContext ctx; + private final AliasFilter aliasFilter; + + public DefaultShardContext(int index, SearchExecutionContext ctx, AliasFilter aliasFilter) { + this.index = index; + this.ctx = ctx; + this.aliasFilter = aliasFilter; + } + + @Override + public int index() { + return index; + } + + @Override + public IndexSearcher searcher() { + return ctx.searcher(); + } + + @Override + public Optional buildSort(List> sorts) throws IOException { + return SortBuilder.buildSort(sorts, ctx); + } + + @Override + public String shardIdentifier() { + return ctx.getFullyQualifiedIndex().getName() + ":" + ctx.getShardId(); + } + + @Override + public SourceLoader newSourceLoader() { + return ctx.newSourceLoader(false); + } + + @Override + public Query toQuery(QueryBuilder queryBuilder) { + Query query = ctx.toQuery(queryBuilder).query(); + NestedLookup nestedLookup = ctx.nestedLookup(); + if (nestedLookup != NestedLookup.EMPTY) { + NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped); + if (nestedHelper.mightMatchNestedDocs(query)) { + // filter out nested documents + query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) + .add(newNonNestedFilter(ctx.indexVersionCreated()), BooleanClause.Occur.FILTER) + .build(); + } + } + if (aliasFilter != AliasFilter.EMPTY) { + Query filterQuery = ctx.toQuery(aliasFilter.getQueryBuilder()).query(); + query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST) + .add(filterQuery, BooleanClause.Occur.FILTER) + .build(); + } + return query; + } + + @Override + public BlockLoader blockLoader(String name, boolean asUnsupportedSource) { + if (asUnsupportedSource) { + return BlockLoader.CONSTANT_NULLS; + } + MappedFieldType fieldType = ctx.getFieldType(name); + if (fieldType == null) { + // the field does not exist in this context + return BlockLoader.CONSTANT_NULLS; + } + BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + return ctx.getFullyQualifiedIndex().getName(); + } + + @Override + public SearchLookup lookup() { + return ctx.lookup(); + } + + @Override + public Set sourcePaths(String name) { + return ctx.sourcePath(name); + } + + @Override + public String parentField(String field) { + return ctx.parentPath(field); + } + + @Override + public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() { + return (FieldNamesFieldMapper.FieldNamesFieldType) ctx.lookup().fieldType(FieldNamesFieldMapper.NAME); + } + }); + if (loader == null) { + HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", name); + return BlockLoader.CONSTANT_NULLS; + } + + return loader; + } + } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 15aec4545e7e7..d79becfc8a736 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.planner; -import org.apache.lucene.search.Query; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; @@ -17,7 +16,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.LuceneCountOperator; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.operator.ColumnExtractOperator; import org.elasticsearch.compute.operator.Driver; @@ -49,7 +47,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; @@ -102,7 +99,6 @@ import static java.util.Arrays.asList; import static java.util.stream.Collectors.joining; -import static org.elasticsearch.compute.lucene.LuceneOperator.NO_LIMIT; import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; @@ -253,17 +249,7 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio EsStatsQueryExec.Stat stat = statsQuery.stats().get(0); EsPhysicalOperationProviders esProvider = (EsPhysicalOperationProviders) physicalOperationProviders; - Function querySupplier = EsPhysicalOperationProviders.querySupplier(stat.filter(statsQuery.query())); - - Expression limitExp = statsQuery.limit(); - int limit = limitExp != null ? 
(Integer) limitExp.fold() : NO_LIMIT; - final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory( - esProvider.searchContexts(), - querySupplier, - context.queryPragmas.dataPartitioning(), - context.queryPragmas.taskConcurrency(), - limit - ); + final LuceneOperator.Factory luceneFactory = esProvider.countSource(context, stat.filter(statsQuery.query()), statsQuery.limit()); Layout.Builder layout = new Layout.Builder(); layout.append(statsQuery.outputSet()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index aa1eafbf90265..e781ed4a60c35 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -363,6 +363,17 @@ private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener> listener) { listener = ActionListener.runAfter(listener, () -> Releasables.close(context.searchContexts)); + List contexts = new ArrayList<>(context.searchContexts.size()); + for (int i = 0; i < context.searchContexts.size(); i++) { + SearchContext searchContext = context.searchContexts.get(i); + contexts.add( + new EsPhysicalOperationProviders.DefaultShardContext( + i, + searchContext.getSearchExecutionContext(), + searchContext.request().getAliasFilter() + ) + ); + } final List drivers; try { LocalExecutionPlanner planner = new LocalExecutionPlanner( @@ -375,7 +386,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, context.exchangeSource(), context.exchangeSink(), enrichLookupService, - new EsPhysicalOperationProviders(context.searchContexts) + new EsPhysicalOperationProviders(contexts) ); LOGGER.debug("Received physical plan:\n{}", plan); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 27a45e71a69c1..3ac1453e6ad8f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -28,8 +28,6 @@ import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; @@ -150,7 +148,7 @@ private EsqlConfiguration config() { private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOException { int numShards = randomIntBetween(1, 1000); - List searchContexts = new ArrayList<>(numShards); + List shardContexts = new ArrayList<>(numShards); var searcher = new ContextIndexSearcher( reader(), IndexSearcher.getDefaultSimilarity(), @@ -159,12 +157,16 @@ private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOExc true ); for (int i = 0; i < numShards; i++) { - searchContexts.add( - new 
TestSearchContext(createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher), null, searcher) + shardContexts.add( + new EsPhysicalOperationProviders.DefaultShardContext( + i, + createSearchExecutionContext(createMapperService(mapping(b -> {})), searcher), + null + ) ); } - releasables.addAll(searchContexts); - return new EsPhysicalOperationProviders(searchContexts); + releasables.add(searcher); + return new EsPhysicalOperationProviders(shardContexts); } private IndexReader reader() { From c11f3297208a3acb7532897ab5f5b6bcceaa1c6c Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 12 Jan 2024 16:16:27 +0100 Subject: [PATCH 16/35] Temporarily tolerate tracing.apm.agent.global_labels.XYZ settings (#104317) --- .../elasticsearch/telemetry/apm/internal/APMAgentSettings.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 12e81e7ae78e1..f2e6b6372c267 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -228,7 +228,7 @@ public void setAgentSetting(String key, String value) { return new Setting<>(qualifiedKey, "", (value) -> { if (qualifiedKey.equals("_na_") == false && PERMITTED_AGENT_KEYS.contains(key) == false) { // TODO figure out why those settings are kept, these should be reformatted / removed by now - if (key.startsWith("global_labels.")) { + if (qualifiedKey.startsWith("tracing.apm.agent.global_labels.")) { return value; } throw new IllegalArgumentException("Configuration [" + qualifiedKey + "] is either prohibited or unknown."); From abccea00d11aef2815e48cfadf786b1ae5e50c4e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 12 Jan 2024 16:41:37 +0100 Subject: [PATCH 17/35] Move some test-only parsing code to test modules (#104261) None of this code needs to live in the prod classes, moving it over to the test code where it's used.
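The shape of this refactoring, as a minimal sketch: the production response keeps toXContent and exposes its field names package-private, while the old fromXContent body moves beside the only round-trip test that calls it. ExampleResponse and ExampleResponseTests are invented names, not classes touched by this patch; the sketch only assumes the xcontent utilities already visible in the diffs below.

import java.io.IOException;

import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

// Production class: serialization only. The field name is package-private so
// the test parser can reference it instead of re-declaring the literal.
class ExampleResponse implements ToXContentObject {
    static final String TOOK = "took";

    private final long took;

    ExampleResponse(long took) {
        this.took = took;
    }

    long took() {
        return took;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().field(TOOK, took).endObject();
    }
}

// Test side: the body of the removed ExampleResponse.fromXContent(), now a
// helper next to the round-trip test (e.g. a doParseInstance() override).
class ExampleResponseTests {
    static ExampleResponse parseInstance(XContentParser parser) throws IOException {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
        long took = -1L;
        // A flat, single-field object is enough for the sketch; the real
        // parsers in this patch dispatch on currentName() per field.
        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            if (parser.currentToken() == XContentParser.Token.FIELD_NAME && ExampleResponse.TOOK.equals(parser.currentName())) {
                parser.nextToken();
                took = parser.longValue();
            }
        }
        return new ExampleResponse(took);
    }
}

Reusing the shared constants from the test (as BulkResponseTests does with BulkResponse.TOOK and BulkResponse.ITEMS below) keeps the test parser from silently diverging if a response field is ever renamed.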
--- .../mustache/SearchTemplateResponse.java | 30 ---- .../mustache/SearchTemplateResponseTests.java | 26 +++- .../cluster/health/ClusterHealthResponse.java | 133 +++--------------- .../settings/get/GetSettingsResponse.java | 58 -------- .../action/bulk/BulkItemResponse.java | 99 +------------ .../action/bulk/BulkResponse.java | 50 +------ .../elasticsearch/action/get/GetResponse.java | 34 ----- .../action/ingest/GetPipelineResponse.java | 31 ---- .../ingest/SimulateDocumentVerboseResult.java | 19 --- .../action/search/ClearScrollResponse.java | 25 +--- .../health/ClusterHealthResponsesTests.java | 97 ++++++++++++- .../get/GetSettingsResponseTests.java | 55 +++++++- .../action/bulk/BulkItemResponseTests.java | 83 ++++++++++- .../action/bulk/BulkResponseTests.java | 42 +++++- .../action/get/GetResponseTests.java | 20 ++- .../ingest/GetPipelineResponseTests.java | 22 ++- .../SimulateDocumentVerboseResultTests.java | 20 ++- .../search/ClearScrollResponseTests.java | 26 +++- 18 files changed, 403 insertions(+), 467 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 34e771c51e4f4..39da4066a7859 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.RestStatus; @@ -22,14 +21,10 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.io.InputStream; -import java.util.Map; public class SearchTemplateResponse extends ActionResponse implements ToXContentObject { public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output"); @@ -108,31 +103,6 @@ public boolean hasReferences() { return refCounted.hasReferences(); } - public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { - SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); - Map contentAsMap = parser.map(); - - if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) { - Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName()); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); - searchTemplateResponse.setSource(BytesReference.bytes(builder)); - } else { - XContentType contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); - try ( - XContentParser searchResponseParser = XContentHelper.createParserNotCompressed( - XContentParserConfiguration.EMPTY.withRegistry(parser.getXContentRegistry()) - .withDeprecationHandler(parser.getDeprecationHandler()), - BytesReference.bytes(builder), - 
contentType - ) - ) { - searchTemplateResponse.setResponse(SearchResponse.fromXContent(searchResponseParser)); - } - } - return searchTemplateResponse; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 73c8887669a02..fce288db66620 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchResponseUtils; @@ -20,9 +21,11 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Map; import java.util.function.Predicate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -42,7 +45,28 @@ protected SearchTemplateResponse createTestInstance() { @Override protected SearchTemplateResponse doParseInstance(XContentParser parser) throws IOException { - return SearchTemplateResponse.fromXContent(parser); + SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); + Map contentAsMap = parser.map(); + + if (contentAsMap.containsKey(SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName())) { + Object source = contentAsMap.get(SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName()); + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); + searchTemplateResponse.setSource(BytesReference.bytes(builder)); + } else { + XContentType contentType = parser.contentType(); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); + try ( + XContentParser searchResponseParser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(parser.getXContentRegistry()) + .withDeprecationHandler(parser.getDeprecationHandler()), + BytesReference.bytes(builder), + contentType + ) + ) { + searchTemplateResponse.setResponse(SearchResponse.fromXContent(searchResponseParser)); + } + } + return searchTemplateResponse; } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java index 19e84e7443eed..e7e2679e84eb5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -17,132 +17,35 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import 
org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public class ClusterHealthResponse extends ActionResponse implements ToXContentObject { - private static final String CLUSTER_NAME = "cluster_name"; - private static final String STATUS = "status"; - private static final String TIMED_OUT = "timed_out"; - private static final String NUMBER_OF_NODES = "number_of_nodes"; - private static final String NUMBER_OF_DATA_NODES = "number_of_data_nodes"; - private static final String NUMBER_OF_PENDING_TASKS = "number_of_pending_tasks"; - private static final String NUMBER_OF_IN_FLIGHT_FETCH = "number_of_in_flight_fetch"; - private static final String DELAYED_UNASSIGNED_SHARDS = "delayed_unassigned_shards"; + static final String CLUSTER_NAME = "cluster_name"; + static final String STATUS = "status"; + static final String TIMED_OUT = "timed_out"; + static final String NUMBER_OF_NODES = "number_of_nodes"; + static final String NUMBER_OF_DATA_NODES = "number_of_data_nodes"; + static final String NUMBER_OF_PENDING_TASKS = "number_of_pending_tasks"; + static final String NUMBER_OF_IN_FLIGHT_FETCH = "number_of_in_flight_fetch"; + static final String DELAYED_UNASSIGNED_SHARDS = "delayed_unassigned_shards"; private static final String TASK_MAX_WAIT_TIME_IN_QUEUE = "task_max_waiting_in_queue"; - private static final String TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS = "task_max_waiting_in_queue_millis"; - private static final String ACTIVE_SHARDS_PERCENT_AS_NUMBER = "active_shards_percent_as_number"; + static final String TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS = "task_max_waiting_in_queue_millis"; + static final String ACTIVE_SHARDS_PERCENT_AS_NUMBER = "active_shards_percent_as_number"; private static final String ACTIVE_SHARDS_PERCENT = "active_shards_percent"; - private static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; - private static final String ACTIVE_SHARDS = "active_shards"; - private static final String RELOCATING_SHARDS = "relocating_shards"; - private static final String INITIALIZING_SHARDS = "initializing_shards"; - private static final String UNASSIGNED_SHARDS = "unassigned_shards"; - private static final String INDICES = "indices"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_health_response", - true, - parsedObjects -> { - int i = 0; - // ClusterStateHealth fields - int numberOfNodes = (int) parsedObjects[i++]; - int numberOfDataNodes = (int) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int activePrimaryShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - double activeShardsPercent = (double) parsedObjects[i++]; - String statusStr = (String) 
parsedObjects[i++]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - @SuppressWarnings("unchecked") - List indexList = (List) parsedObjects[i++]; - final Map indices; - if (indexList == null || indexList.isEmpty()) { - indices = emptyMap(); - } else { - indices = Maps.newMapWithExpectedSize(indexList.size()); - for (ClusterIndexHealth indexHealth : indexList) { - indices.put(indexHealth.getIndex(), indexHealth); - } - } - ClusterStateHealth stateHealth = new ClusterStateHealth( - activePrimaryShards, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - numberOfNodes, - numberOfDataNodes, - activeShardsPercent, - status, - indices - ); - - // ClusterHealthResponse fields - String clusterName = (String) parsedObjects[i++]; - int numberOfPendingTasks = (int) parsedObjects[i++]; - int numberOfInFlightFetch = (int) parsedObjects[i++]; - int delayedUnassignedShards = (int) parsedObjects[i++]; - long taskMaxWaitingTimeMillis = (long) parsedObjects[i++]; - boolean timedOut = (boolean) parsedObjects[i]; - return new ClusterHealthResponse( - clusterName, - numberOfPendingTasks, - numberOfInFlightFetch, - delayedUnassignedShards, - TimeValue.timeValueMillis(taskMaxWaitingTimeMillis), - timedOut, - stateHealth - ); - } - ); - - private static final ObjectParser.NamedObjectParser INDEX_PARSER = ( - XContentParser parser, - Void context, - String index) -> ClusterIndexHealth.innerFromXContent(parser, index); - - static { - // ClusterStateHealth fields - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_NODES)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_DATA_NODES)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_PRIMARY_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareDouble(constructorArg(), new ParseField(ACTIVE_SHARDS_PERCENT_AS_NUMBER)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - // Can be absent if LEVEL == 'cluster' - PARSER.declareNamedObjects(optionalConstructorArg(), INDEX_PARSER, new ParseField(INDICES)); - - // ClusterHealthResponse fields - PARSER.declareString(constructorArg(), new ParseField(CLUSTER_NAME)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_PENDING_TASKS)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_IN_FLIGHT_FETCH)); - PARSER.declareInt(constructorArg(), new ParseField(DELAYED_UNASSIGNED_SHARDS)); - PARSER.declareLong(constructorArg(), new ParseField(TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS)); - PARSER.declareBoolean(constructorArg(), new ParseField(TIMED_OUT)); - } + static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String INDICES = "indices"; private String clusterName; private int numberOfPendingTasks = 0; @@ -370,10 +273,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static ClusterHealthResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean 
equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java index ec3a5f71c3a48..8a106d1b43d3e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponse.java @@ -15,15 +15,12 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Objects; @@ -91,61 +88,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(indexToDefaultSettings, StreamOutput::writeWriteable); } - private static void parseSettingsField( - XContentParser parser, - String currentIndexName, - Map indexToSettings, - Map indexToDefaultSettings - ) throws IOException { - - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - switch (parser.currentName()) { - case "settings" -> indexToSettings.put(currentIndexName, Settings.fromXContent(parser)); - case "defaults" -> indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser)); - default -> parser.skipChildren(); - } - } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); - } - parser.nextToken(); - } - - private static void parseIndexEntry( - XContentParser parser, - Map indexToSettings, - Map indexToDefaultSettings - ) throws IOException { - String indexName = parser.currentName(); - parser.nextToken(); - while (parser.isClosed() == false && parser.currentToken() != XContentParser.Token.END_OBJECT) { - parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings); - } - } - - public static GetSettingsResponse fromXContent(XContentParser parser) throws IOException { - HashMap indexToSettings = new HashMap<>(); - HashMap indexToDefaultSettings = new HashMap<>(); - - if (parser.currentToken() == null) { - parser.nextToken(); - } - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - parser.nextToken(); - - while (parser.isClosed() == false) { - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - // we must assume this is an index entry - parseIndexEntry(parser, indexToSettings, indexToDefaultSettings); - } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); - } else { - parser.nextToken(); - } - } - - return new GetSettingsResponse(Map.copyOf(indexToSettings), Map.copyOf(indexToDefaultSettings)); - } - @Override public String toString() { try { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index c2b6c666d829a..151e8795d0f82 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -21,26 +21,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id * of the relevant action, and if it has failed or not (with the failure message in case it failed). @@ -49,8 +40,8 @@ public class BulkItemResponse implements Writeable, ToXContentObject { private static final String _INDEX = "_index"; private static final String _ID = "_id"; - private static final String STATUS = "status"; - private static final String ERROR = "error"; + static final String STATUS = "status"; + static final String ERROR = "error"; public RestStatus status() { return failure == null ? response.status() : failure.getStatus(); @@ -80,80 +71,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Reads a {@link BulkItemResponse} from a {@link XContentParser}. - * - * @param parser the {@link XContentParser} - * @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of - * the item in the {@link BulkResponse#getItems} array. 
- */ - public static BulkItemResponse fromXContent(XContentParser parser, int id) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - final OpType opType = OpType.fromString(currentFieldName); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - - DocWriteResponse.Builder builder = null; - CheckedConsumer itemParser = null; - - if (opType == OpType.INDEX || opType == OpType.CREATE) { - final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); - builder = indexResponseBuilder; - itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder); - - } else if (opType == OpType.UPDATE) { - final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); - builder = updateResponseBuilder; - itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder); - - } else if (opType == OpType.DELETE) { - final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); - builder = deleteResponseBuilder; - itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder); - } else { - throwUnknownField(currentFieldName, parser); - } - - RestStatus status = null; - ElasticsearchException exception = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } - - if (ERROR.equals(currentFieldName)) { - if (token == XContentParser.Token.START_OBJECT) { - exception = ElasticsearchException.fromXContent(parser); - } - } else if (STATUS.equals(currentFieldName)) { - if (token == XContentParser.Token.VALUE_NUMBER) { - status = RestStatus.fromCode(parser.intValue()); - } - } else { - itemParser.accept(parser); - } - } - - ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); - - BulkItemResponse bulkItemResponse; - if (exception != null) { - Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status); - bulkItemResponse = BulkItemResponse.failure(id, opType, failure); - } else { - bulkItemResponse = BulkItemResponse.success(id, opType, builder.build()); - } - return bulkItemResponse; - } - /** * Represents a failure. */ @@ -171,18 +88,6 @@ public static class Failure implements Writeable, ToXContentFragment { private final long term; private final boolean aborted; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "bulk_failures", - true, - a -> new Failure((String) a[0], (String) a[1], (Exception) a[2], RestStatus.fromCode((int) a[3])) - ); - static { - PARSER.declareString(constructorArg(), new ParseField(INDEX_FIELD)); - PARSER.declareString(optionalConstructorArg(), new ParseField(ID_FIELD)); - PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(CAUSE_FIELD)); - PARSER.declareInt(constructorArg(), new ParseField(STATUS_FIELD)); - } - /** * For write failures before operation was assigned a sequence number. 
* diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 2065a31ce5566..111dbfb0f7af6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -15,16 +15,9 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; -import java.util.List; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** * A response of a bulk execution. Holding a response for each item responding (in order) of the @@ -33,10 +26,10 @@ */ public class BulkResponse extends ActionResponse implements Iterable, ChunkedToXContentObject { - private static final String ITEMS = "items"; - private static final String ERRORS = "errors"; - private static final String TOOK = "took"; - private static final String INGEST_TOOK = "ingest_took"; + static final String ITEMS = "items"; + static final String ERRORS = "errors"; + static final String TOOK = "took"; + static final String INGEST_TOOK = "ingest_took"; public static final long NO_INGEST_TOOK = -1L; @@ -133,41 +126,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeZLong(ingestTookInMillis); } - public static BulkResponse fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - - long took = -1L; - long ingestTook = NO_INGEST_TOOK; - List items = new ArrayList<>(); - - String currentFieldName = parser.currentName(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (TOOK.equals(currentFieldName)) { - took = parser.longValue(); - } else if (INGEST_TOOK.equals(currentFieldName)) { - ingestTook = parser.longValue(); - } else if (ERRORS.equals(currentFieldName) == false) { - throwUnknownField(currentFieldName, parser); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (ITEMS.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - items.add(BulkItemResponse.fromXContent(parser, items.size())); - } - } else { - throwUnknownField(currentFieldName, parser); - } - } else { - throwUnknownToken(token, parser); - } - } - return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook); - } - @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat(Iterators.single((builder, p) -> { diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index 6871c60f11a15..5b407d0ebceb0 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; @@ -19,11 +18,9 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Iterator; -import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -149,37 +146,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return getResult.toXContent(builder, params); } - /** - * This method can be used to parse a {@link GetResponse} object when it has been printed out - * as a xcontent using the {@link #toXContent(XContentBuilder, Params)} method. - *
<p>
- * For forward compatibility reason this method might not fail if it tries to parse a field it - * doesn't know. But before returning the result it will check that enough information were - * parsed to return a valid {@link GetResponse} instance and throws a {@link ParsingException} - * otherwise. This is the case when we get a 404 back, which can be parsed as a normal - * {@link GetResponse} with found set to false, or as an elasticsearch exception. The caller - * of this method needs a way to figure out whether we got back a valid get response, which - * can be done by catching ParsingException. - * - * @param parser {@link XContentParser} to parse the response from - * @return a {@link GetResponse} - * @throws IOException is an I/O exception occurs during the parsing - */ - public static GetResponse fromXContent(XContentParser parser) throws IOException { - GetResult getResult = GetResult.fromXContent(parser); - - // At this stage we ensure that we parsed enough information to return - // a valid GetResponse instance. If it's not the case, we throw an - // exception so that callers know it and can handle it correctly. - if (getResult.getIndex() == null && getResult.getId() == null) { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Missing required fields [%s,%s]", GetResult._INDEX, GetResult._ID) - ); - } - return new GetResponse(getResult); - } - @Override public void writeTo(StreamOutput out) throws IOException { getResult.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index bc9c88a706f30..c685a49cddf2f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -10,15 +10,12 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; @@ -27,8 +24,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - public class GetPipelineResponse extends ActionResponse implements ToXContentObject { private final List pipelines; @@ -90,32 +85,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * - * @param parser the parser for the XContent that contains the serialized GetPipelineResponse. 
- * @return an instance of GetPipelineResponse read from the parser - * @throws IOException If the parsing fails - */ - public static GetPipelineResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - List pipelines = new ArrayList<>(); - while (parser.nextToken().equals(Token.FIELD_NAME)) { - String pipelineId = parser.currentName(); - parser.nextToken(); - try (XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent())) { - contentBuilder.generator().copyCurrentStructure(parser); - PipelineConfiguration pipeline = new PipelineConfiguration( - pipelineId, - BytesReference.bytes(contentBuilder), - contentBuilder.contentType() - ); - pipelines.add(pipeline); - } - } - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); - return new GetPipelineResponse(pipelines); - } - @Override public boolean equals(Object other) { if (other == null) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 850ff50cd0187..28aec1ee0ebb8 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -9,17 +9,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} * this result class holds the intermediate result each processor did to the sample document. 
@@ -28,16 +23,6 @@ public final class SimulateDocumentVerboseResult implements SimulateDocumentResu public static final String PROCESSOR_RESULT_FIELD = "processor_results"; private final List processorResults; - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "simulate_document_verbose_result", - true, - a -> new SimulateDocumentVerboseResult((List) a[0]) - ); - static { - PARSER.declareObjectArray(constructorArg(), SimulateProcessorResult.PARSER, new ParseField(PROCESSOR_RESULT_FIELD)); - } - public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } @@ -73,8 +58,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } - - public static SimulateDocumentVerboseResult fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java index 8b1116951df82..3d00d18565756 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java @@ -12,33 +12,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class ClearScrollResponse extends ActionResponse implements ToXContentObject { - private static final ParseField SUCCEEDED = new ParseField("succeeded"); - private static final ParseField NUMFREED = new ParseField("num_freed"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "clear_scroll", - true, - a -> new ClosePointInTimeResponse((boolean) a[0], (int) a[1]) - ); - static { - PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SUCCEEDED, ObjectParser.ValueType.BOOLEAN); - PARSER.declareField(constructorArg(), (parser, context) -> parser.intValue(), NUMFREED, ObjectParser.ValueType.INT); - } + public static final ParseField SUCCEEDED = new ParseField("succeeded"); + public static final ParseField NUMFREED = new ParseField("num_freed"); private final boolean succeeded; private final int numFreed; @@ -82,13 +68,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Parse the clear scroll response body into a new {@link ClearScrollResponse} object - */ - public static ClosePointInTimeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(succeeded); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index 0f4c01c674b1a..d4231c9f7538b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -23,23 +23,118 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.function.Predicate; import java.util.regex.Pattern; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ClusterHealthResponsesTests extends AbstractXContentSerializingTestCase { + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_health_response", + true, + parsedObjects -> { + int i = 0; + // ClusterStateHealth fields + int numberOfNodes = (int) parsedObjects[i++]; + int numberOfDataNodes = (int) parsedObjects[i++]; + int activeShards = (int) parsedObjects[i++]; + int relocatingShards = (int) parsedObjects[i++]; + int activePrimaryShards = (int) parsedObjects[i++]; + int initializingShards = (int) parsedObjects[i++]; + int unassignedShards = (int) parsedObjects[i++]; + double activeShardsPercent = (double) parsedObjects[i++]; + String statusStr = (String) parsedObjects[i++]; + ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); + @SuppressWarnings("unchecked") + List indexList = (List) parsedObjects[i++]; + final Map indices; + if (indexList == null || indexList.isEmpty()) { + indices = emptyMap(); + } else { + indices = Maps.newMapWithExpectedSize(indexList.size()); + for (ClusterIndexHealth indexHealth : indexList) { + indices.put(indexHealth.getIndex(), indexHealth); + } + } + ClusterStateHealth stateHealth = new ClusterStateHealth( + activePrimaryShards, + activeShards, + relocatingShards, + initializingShards, + unassignedShards, + numberOfNodes, + numberOfDataNodes, + activeShardsPercent, + status, + indices + ); + + // ClusterHealthResponse fields + String clusterName = (String) parsedObjects[i++]; + int numberOfPendingTasks = (int) parsedObjects[i++]; + int numberOfInFlightFetch = (int) parsedObjects[i++]; + int delayedUnassignedShards = (int) parsedObjects[i++]; + long taskMaxWaitingTimeMillis = (long) parsedObjects[i++]; + boolean timedOut = (boolean) parsedObjects[i]; + return new ClusterHealthResponse( + clusterName, + numberOfPendingTasks, + numberOfInFlightFetch, + delayedUnassignedShards, + TimeValue.timeValueMillis(taskMaxWaitingTimeMillis), + timedOut, + stateHealth + ); + } + ); + + private static final ObjectParser.NamedObjectParser INDEX_PARSER = ( + XContentParser parser, + Void context, 
+ String index) -> ClusterIndexHealth.innerFromXContent(parser, index); + + static { + // ClusterStateHealth fields + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_NODES)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_DATA_NODES)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.RELOCATING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_PRIMARY_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.INITIALIZING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.UNASSIGNED_SHARDS)); + PARSER.declareDouble(constructorArg(), new ParseField(ClusterHealthResponse.ACTIVE_SHARDS_PERCENT_AS_NUMBER)); + PARSER.declareString(constructorArg(), new ParseField(ClusterHealthResponse.STATUS)); + // Can be absent if LEVEL == 'cluster' + PARSER.declareNamedObjects(optionalConstructorArg(), INDEX_PARSER, new ParseField(ClusterHealthResponse.INDICES)); + + // ClusterHealthResponse fields + PARSER.declareString(constructorArg(), new ParseField(ClusterHealthResponse.CLUSTER_NAME)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_PENDING_TASKS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.NUMBER_OF_IN_FLIGHT_FETCH)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterHealthResponse.DELAYED_UNASSIGNED_SHARDS)); + PARSER.declareLong(constructorArg(), new ParseField(ClusterHealthResponse.TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS)); + PARSER.declareBoolean(constructorArg(), new ParseField(ClusterHealthResponse.TIMED_OUT)); + } + private final ClusterStatsLevel level = randomFrom(ClusterStatsLevel.values()); public void testIsTimeout() { @@ -102,7 +197,7 @@ ClusterHealthResponse maybeSerialize(ClusterHealthResponse clusterHealth) throws @Override protected ClusterHealthResponse doParseInstance(XContentParser parser) { - return ClusterHealthResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java index 6c9297bb41ae0..86968bda62d91 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -18,6 +19,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.function.Predicate; @@ -70,7 +72,58 @@ protected Writeable.Reader instanceReader() { @Override protected GetSettingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetSettingsResponse.fromXContent(parser); + HashMap indexToSettings = new HashMap<>(); + HashMap 
indexToDefaultSettings = new HashMap<>(); + + if (parser.currentToken() == null) { + parser.nextToken(); + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + parser.nextToken(); + + while (parser.isClosed() == false) { + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + // we must assume this is an index entry + parseIndexEntry(parser, indexToSettings, indexToDefaultSettings); + } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } else { + parser.nextToken(); + } + } + + return new GetSettingsResponse(Map.copyOf(indexToSettings), Map.copyOf(indexToDefaultSettings)); + } + + private static void parseIndexEntry( + XContentParser parser, + Map indexToSettings, + Map indexToDefaultSettings + ) throws IOException { + String indexName = parser.currentName(); + parser.nextToken(); + while (parser.isClosed() == false && parser.currentToken() != XContentParser.Token.END_OBJECT) { + parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings); + } + } + + private static void parseSettingsField( + XContentParser parser, + String currentIndexName, + Map indexToSettings, + Map indexToDefaultSettings + ) throws IOException { + + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + switch (parser.currentName()) { + case "settings" -> indexToSettings.put(currentIndexName, Settings.fromXContent(parser)); + case "defaults" -> indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser)); + default -> parser.skipChildren(); + } + } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + parser.nextToken(); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index ccf9681d3680b..76b1fa0011540 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -13,15 +13,18 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.delete.DeleteResponseTests; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponseTests; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.action.update.UpdateResponseTests; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -34,6 +37,8 @@ import static org.elasticsearch.ElasticsearchExceptionTests.assertDeepEquals; import static org.elasticsearch.ElasticsearchExceptionTests.randomExceptions; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; import static org.hamcrest.Matchers.containsString; public class BulkItemResponseTests extends ESTestCase { @@ -93,7 +98,7 @@ public void 
testToAndFromXContent() throws IOException { BulkItemResponse parsedBulkItemResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - parsedBulkItemResponse = BulkItemResponse.fromXContent(parser, bulkItemId); + parsedBulkItemResponse = itemResponseFromXContent(parser, bulkItemId); assertNull(parser.nextToken()); } assertBulkItemResponse(expectedBulkItemResponse, parsedBulkItemResponse); @@ -127,7 +132,7 @@ public void testFailureToAndFromXContent() throws IOException { BulkItemResponse parsedBulkItemResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - parsedBulkItemResponse = BulkItemResponse.fromXContent(parser, itemId); + parsedBulkItemResponse = itemResponseFromXContent(parser, itemId); assertNull(parser.nextToken()); } assertBulkItemResponse(expectedBulkItemResponse, parsedBulkItemResponse); @@ -161,4 +166,78 @@ public static void assertBulkItemResponse(BulkItemResponse expected, BulkItemRes } } } + + /** + * Reads a {@link BulkItemResponse} from a {@link XContentParser}. + * + * @param parser the {@link XContentParser} + * @param id the id to assign to the parsed {@link BulkItemResponse}. It is usually the index of + * the item in the {@link BulkResponse#getItems} array. + */ + public static BulkItemResponse itemResponseFromXContent(XContentParser parser, int id) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + + final DocWriteRequest.OpType opType = DocWriteRequest.OpType.fromString(currentFieldName); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + + DocWriteResponse.Builder builder = null; + CheckedConsumer itemParser = null; + + if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { + final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); + builder = indexResponseBuilder; + itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder); + + } else if (opType == DocWriteRequest.OpType.UPDATE) { + final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); + builder = updateResponseBuilder; + itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder); + + } else if (opType == DocWriteRequest.OpType.DELETE) { + final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); + builder = deleteResponseBuilder; + itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder); + } else { + throwUnknownField(currentFieldName, parser); + } + + RestStatus status = null; + ElasticsearchException exception = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } + + if (BulkItemResponse.ERROR.equals(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + exception = ElasticsearchException.fromXContent(parser); + } + } else if (BulkItemResponse.STATUS.equals(currentFieldName)) { + if (token == XContentParser.Token.VALUE_NUMBER) { 
+ status = RestStatus.fromCode(parser.intValue()); + } + } else { + itemParser.accept(parser); + } + } + + ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.END_OBJECT, token, parser); + + BulkItemResponse bulkItemResponse; + if (exception != null) { + Failure failure = new Failure(builder.getShardId().getIndexName(), builder.getId(), exception, status); + bulkItemResponse = BulkItemResponse.failure(id, opType, failure); + } else { + bulkItemResponse = BulkItemResponse.success(id, opType, builder.build()); + } + return bulkItemResponse; + } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java index c1cd88e0864a4..366196b6a0eac 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java @@ -24,11 +24,16 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import static org.elasticsearch.ElasticsearchExceptionTests.randomExceptions; import static org.elasticsearch.action.bulk.BulkItemResponseTests.assertBulkItemResponse; import static org.elasticsearch.action.bulk.BulkResponse.NO_INGEST_TOOK; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.equalTo; @@ -78,7 +83,7 @@ public void testToAndFromXContent() throws IOException { BulkResponse parsedBulkResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - parsedBulkResponse = BulkResponse.fromXContent(parser); + parsedBulkResponse = fromXContent(parser); assertNull(parser.nextToken()); } @@ -154,4 +159,39 @@ public void testToXContentPlacesErrorsFirst() throws IOException { } return randomDocWriteResponses; } + + private static BulkResponse fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + + long took = -1L; + long ingestTook = NO_INGEST_TOOK; + List items = new ArrayList<>(); + + String currentFieldName = parser.currentName(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (BulkResponse.TOOK.equals(currentFieldName)) { + took = parser.longValue(); + } else if (BulkResponse.INGEST_TOOK.equals(currentFieldName)) { + ingestTook = parser.longValue(); + } else if (BulkResponse.ERRORS.equals(currentFieldName) == false) { + throwUnknownField(currentFieldName, parser); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (BulkResponse.ITEMS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + items.add(BulkItemResponseTests.itemResponseFromXContent(parser, items.size())); + } + } else { + throwUnknownField(currentFieldName, parser); + } + } else { + 
throwUnknownToken(token, parser); + } + } + return new BulkResponse(items.toArray(new BulkItemResponse[items.size()]), took, ingestTook); + } } diff --git a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java index ab72bf17beca9..49697101b3234 100644 --- a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.Locale; import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; @@ -69,7 +70,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } GetResponse parsedGetResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedGetResponse = GetResponse.fromXContent(parser); + parsedGetResponse = parseInstance(parser); assertNull(parser.nextToken()); } assertEquals(expectedGetResponse.getSourceAsMap(), parsedGetResponse.getSourceAsMap()); @@ -172,7 +173,7 @@ public void testFromXContentThrowsParsingException() throws IOException { BytesReference originalBytes = toShuffledXContent(getResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - ParsingException exception = expectThrows(ParsingException.class, () -> GetResponse.fromXContent(parser)); + ParsingException exception = expectThrows(ParsingException.class, () -> parseInstance(parser)); assertEquals("Missing required fields [_index,_id]", exception.getMessage()); } } @@ -184,4 +185,19 @@ private static GetResponse copyGetResponse(GetResponse getResponse) { private static GetResponse mutateGetResponse(GetResponse getResponse) { return new GetResponse(mutateGetResult(getResponse.getResult)); } + + private static GetResponse parseInstance(XContentParser parser) throws IOException { + GetResult getResult = GetResult.fromXContent(parser); + + // At this stage we ensure that we parsed enough information to return + // a valid GetResponse instance. If it's not the case, we throw an + // exception so that callers know it and can handle it correctly. 
+ if (getResult.getIndex() == null && getResult.getId() == null) { + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Missing required fields [%s,%s]", GetResult._INDEX, GetResult._ID) + ); + } + return new GetResponse(getResult); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java index 6f5841f3d2a03..c1ed3a670dffd 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java @@ -26,6 +26,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + public class GetPipelineResponseTests extends AbstractXContentSerializingTestCase { private XContentBuilder getRandomXContentBuilder() throws IOException { @@ -69,7 +71,7 @@ public void testXContentDeserialization() throws IOException { .xContent() .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()) ) { - parsedResponse = GetPipelineResponse.fromXContent(parser); + parsedResponse = doParseInstance(parser); } List actualPipelines = response.pipelines(); List parsedPipelines = parsedResponse.pipelines(); @@ -82,7 +84,23 @@ public void testXContentDeserialization() throws IOException { @Override protected GetPipelineResponse doParseInstance(XContentParser parser) throws IOException { - return GetPipelineResponse.fromXContent(parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + List pipelines = new ArrayList<>(); + while (parser.nextToken().equals(XContentParser.Token.FIELD_NAME)) { + String pipelineId = parser.currentName(); + parser.nextToken(); + try (XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent())) { + contentBuilder.generator().copyCurrentStructure(parser); + PipelineConfiguration pipeline = new PipelineConfiguration( + pipelineId, + BytesReference.bytes(contentBuilder), + contentBuilder.contentType() + ); + pipelines.add(pipeline); + } + } + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); + return new GetPipelineResponse(pipelines); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java index ebfeb310a916b..921637d06b982 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -17,8 +19,24 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "simulate_document_verbose_result", + true, 
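+        // the 'true' above tells the parser to ignore unknown fields instead of failing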
+ a -> new SimulateDocumentVerboseResult((List) a[0]) + ); + static { + PARSER.declareObjectArray( + constructorArg(), + SimulateProcessorResult.PARSER, + new ParseField(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD) + ); + } + static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) { int numDocs = randomIntBetween(0, 5); List results = new ArrayList<>(); @@ -42,7 +60,7 @@ protected SimulateDocumentVerboseResult createTestInstance() { @Override protected SimulateDocumentVerboseResult doParseInstance(XContentParser parser) { - return SimulateDocumentVerboseResult.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java b/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java index 8b2759ff5a7a0..51f741e4f03fc 100644 --- a/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java +++ b/server/src/test/java/org/elasticsearch/search/ClearScrollResponseTests.java @@ -9,9 +9,12 @@ package org.elasticsearch.search; import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.ClosePointInTimeResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -21,9 +24,30 @@ import java.io.IOException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class ClearScrollResponseTests extends ESTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "clear_scroll", + true, + a -> new ClosePointInTimeResponse((boolean) a[0], (int) a[1]) + ); + static { + PARSER.declareField( + constructorArg(), + (parser, context) -> parser.booleanValue(), + ClearScrollResponse.SUCCEEDED, + ObjectParser.ValueType.BOOLEAN + ); + PARSER.declareField( + constructorArg(), + (parser, context) -> parser.intValue(), + ClearScrollResponse.NUMFREED, + ObjectParser.ValueType.INT + ); + } + public void testToXContent() throws IOException { ClearScrollResponse clearScrollResponse = new ClearScrollResponse(true, 10); try (XContentBuilder builder = JsonXContent.contentBuilder()) { @@ -39,7 +63,7 @@ public void testToAndFromXContent() throws IOException { BytesReference originalBytes = toShuffledXContent(originalResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); ClearScrollResponse parsedResponse; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - parsedResponse = ClearScrollResponse.fromXContent(parser); + parsedResponse = PARSER.parse(parser, null); } assertEquals(originalResponse.isSucceeded(), parsedResponse.isSucceeded()); assertEquals(originalResponse.getNumFreed(), parsedResponse.getNumFreed()); From 6106da5d40e0049bf8ed8bd9ccbddecdbcdcf0dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 12 Jan 2024 17:11:04 +0100 Subject: [PATCH 18/35] [LTR] FieldValueExtrator - Checking if fetched values is empty. (#104314) * Checking if fetched values is empty. 
* Update docs/changelog/104314.yaml --- docs/changelog/104314.yaml | 5 +++++ .../xpack/ml/inference/ltr/FieldValueFeatureExtractor.java | 5 ++++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/104314.yaml diff --git a/docs/changelog/104314.yaml b/docs/changelog/104314.yaml new file mode 100644 index 0000000000000..a17e810a2c023 --- /dev/null +++ b/docs/changelog/104314.yaml @@ -0,0 +1,5 @@ +pr: 104314 +summary: "[LTR] `FieldValueExtrator` - Checking if fetched values is empty" +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java index 5a2e3d29df949..9014c79f0af98 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/FieldValueFeatureExtractor.java @@ -53,7 +53,10 @@ public void setNextReader(LeafReaderContext segmentContext) { public void addFeatures(Map featureMap, int docId) throws IOException { Source source = sourceLookup.getSource(this.segmentContext, docId); for (FieldValueFetcher vf : this.valueFetcherList) { - featureMap.put(vf.fieldName(), vf.valueFetcher().fetchValues(source, docId, new ArrayList<>()).get(0)); + List values = vf.valueFetcher().fetchValues(source, docId, new ArrayList<>()); + if (values.isEmpty() == false) { + featureMap.put(vf.fieldName(), values.get(0)); + } } } From 08c9332350f841990d9f980021a20a6b19d45ee1 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 12 Jan 2024 17:23:40 +0100 Subject: [PATCH 19/35] ESQL: Update moved `heap-attack` QA suite's build group (#104312) This updates ESQL's `heap-attack` QA suite's build group. --- test/external-modules/esql-heap-attack/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index 9f1cdfac61aa1..3a95f3f0b59c8 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -11,6 +11,7 @@ apply plugin: 'elasticsearch.internal-java-rest-test' // Necessary to use tests in Serverless apply plugin: 'elasticsearch.internal-test-artifact' +group = 'org.elasticsearch.plugin' esplugin { description 'A test module that can trigger out of memory' From 5d6b833fe6c98321534fca70fa8c44ee09f530fb Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Fri, 12 Jan 2024 16:50:00 +0000 Subject: [PATCH 20/35] ES|QL Minor async query doc parameter clarification (#104327) This commit adds a minor clarification to an ESQL async query doc parameter. --- docs/reference/esql/esql-async-query-api.asciidoc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 0a78a923523cc..0d15eb313a61f 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -93,8 +93,9 @@ parameters: Timeout duration to wait for the request to finish. Defaults to a 1 second, meaning the request waits for 1 second for the query results. -If this parameter is specified and the request completes during this period, -complete results are returned. 
+If the query completes during this period then results will be
+returned. Otherwise, a query `id` is returned that can later be used to
+retrieve the results.
 If the request does not complete during this period, a query
 <> is returned.

From 7311ab1785cd8e311cc40899f1404c87568cd566 Mon Sep 17 00:00:00 2001
From: Jake Landis
Date: Fri, 12 Jan 2024 11:20:01 -0600
Subject: [PATCH 21/35] Prefer new test cluster framework for new FIPS setting
 (#104287)

https://github.com/elastic/elasticsearch/pull/103483 introduced a new
setting for FIPS only. Due to the way FIPS is configured with the older
gradle test cluster framework, this setting was getting applied to older
clusters in BWC tests that did not have the setting, causing test
failures. The new test framework has better semantics for
version-specific configuration.

This commit applies the new setting via the new framework with a
version-specific condition. Adding this setting to the test clusters is
a simple way to test the setting (which will cause errors if the
required providers are not found in the cluster). The pseudo test does
not care which framework is used for configuration.

Also, using the new framework allows removing some hacky configuration
previously needed to handle older test cluster configuration that used
older versions.

Fixes: https://github.com/elastic/elasticsearch/issues/104234
---
 .../src/main/groovy/elasticsearch.fips.gradle      |  1 -
 .../local/FipsEnabledClusterConfigProvider.java    |  1 +
 .../multi-cluster-tests-with-security/build.gradle | 13 -------------
 .../multi-cluster-tests-with-security/build.gradle | 12 ------------
 4 files changed, 1 insertion(+), 26 deletions(-)

diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
index aaae18401685a..f691d4bd996a7 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
@@ -79,7 +79,6 @@ if (BuildParams.inFipsJvm) {
       // with no x-pack.
Tests having security explicitly enabled/disabled will override this setting setting 'xpack.security.enabled', 'false' setting 'xpack.security.fips_mode.enabled', 'true' - setting 'xpack.security.fips_mode.required_providers', '["BCFIPS", "BCJSSE"]' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.authc.password_hashing.algorithm', 'pbkdf2_stretch' keystorePassword 'keystore-password' diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java index 473456f6b0cc3..3341b20a89d3c 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java @@ -33,6 +33,7 @@ public void apply(LocalClusterSpecBuilder builder) { .setting("xpack.security.fips_mode.enabled", "true") .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.authc.password_hashing.algorithm", "pbkdf2_stretch") + .setting("xpack.security.fips_mode.required_providers", () -> "[BCFIPS, BCJSSE]", n -> n.getVersion().onOrAfter("8.13.0")) .keystorePassword("keystore-password"); } } diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index 9d931974d25d5..d102490820a07 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -50,29 +50,16 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' - maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' - maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' - maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } - -//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses -// the previous minor version, that setting is not available when running in FIPS until 8.14. 
-def maybeDisableForFips(task) { - if (BuildParams.inFipsJvm) { - if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { - task.enabled = false - } - } -} diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index 8f129789d46b7..ae98c08746fab 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -54,29 +54,17 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' - maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' - maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' - maybeDisableForFips(it) } tasks.named("check").configure { dependsOn("integTest") } -//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses -// the previous minor version, that setting is not available when running in FIPS until 8.14. -def maybeDisableForFips(task) { - if (BuildParams.inFipsJvm) { - if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { - task.enabled = false - } - } -} From aa1a5138febbb467449b98393f1394a1e9424fd3 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 12 Jan 2024 09:39:31 -0800 Subject: [PATCH 22/35] Fail fast on heap attack tests (#104328) We can't use assume after a test fails. --- .../xpack/esql/heap_attack/HeapAttackIT.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 578c29d210797..102b65df1bfde 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -75,6 +75,11 @@ protected String getTestRestCluster() { return cluster.getHttpAddresses(); } + @Before + public void skipOnAborted() { + assumeFalse("skip on aborted", SUITE_ABORTED); + } + /** * This used to fail, but we've since compacted top n so it actually succeeds now. */ @@ -552,7 +557,9 @@ private static void assertWriteResponse(Response response) throws IOException { @Before @After public void assertRequestBreakerEmpty() throws Exception { - assumeFalse("suite was aborted", SUITE_ABORTED); + if (SUITE_ABORTED) { + return; + } assertBusy(() -> { HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); From 4f261fc7628e8cad2bbd97ceb7e53854872d8be9 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 12 Jan 2024 09:40:34 -0800 Subject: [PATCH 23/35] Find break limits for DocBlock tests on fly (#104213) The test failure is related to #104159, where we had an overestimate of the RAM usage for X-ArrayVector. 
Instead of updating the break limits, this PR uses the breaker utility that @nik9000 wrote to dynamically compute the limits on-the-fly. Closes #104191 --- .../compute/data/DocVectorTests.java | 112 +++++++++++------- 1 file changed, 69 insertions(+), 43 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index df1662e1dfb6d..2f9cf6ec57775 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -12,11 +12,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.core.Releasables; +import org.elasticsearch.test.BreakerTestUtil; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -62,34 +64,31 @@ public void testNonDecreasingDescendingDocs() { docs.close(); } - private static int MAX_BUILD_BREAKS_LIMIT = 1391; - public void testBuildBreaks() { - testBuildBreaks(ByteSizeValue.ofBytes(between(0, MAX_BUILD_BREAKS_LIMIT))); - } - - public void testBuildBreaksMax() { - testBuildBreaks(ByteSizeValue.ofBytes(MAX_BUILD_BREAKS_LIMIT)); - } - - private void testBuildBreaks(ByteSizeValue limit) { - int size = 100; - BlockFactory blockFactory = blockFactory(limit); - Exception e = expectThrows(CircuitBreakingException.class, () -> { - try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { - for (int r = 0; r < size; r++) { - builder.appendShard(3 - size % 4); - builder.appendSegment(size % 10); - builder.appendDoc(size); - } - builder.build().close(); - } + var maxBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory blockFactory = blockFactory(limit); + buildDocBlock(blockFactory).close(); }); + var limit = ByteSizeValue.ofBytes(randomLongBetween(0, maxBreakLimit.getBytes())); + BlockFactory blockFactory = blockFactory(limit); + Exception e = expectThrows(CircuitBreakingException.class, () -> buildDocBlock(blockFactory).close()); assertThat(e.getMessage(), equalTo("over test limit")); logger.info("break position", e); assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + private DocBlock buildDocBlock(BlockFactory blockFactory) { + int size = 100; + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { + for (int r = 0; r < size; r++) { + builder.appendShard(3 - r % 4); + builder.appendSegment(r % 10); + builder.appendDoc(size); + } + return builder.build(); + } + } + public void testShardSegmentDocMap() { assertShardSegmentDocMap( new int[][] { @@ -171,25 +170,31 @@ private void assertShardSegmentDocMap(int[][] data, int[][] expected) { assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - // TODO these are really difficult to maintain. can we figure these out of the fly? 
- private static final int MAX_SHARD_SEGMENT_DOC_MAP_BREAKS = 2220; - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104191") public void testShardSegmentDocMapBreaks() { - testShardSegmentDocMapBreaks(ByteSizeValue.ofBytes(between(MAX_BUILD_BREAKS_LIMIT + 1, MAX_SHARD_SEGMENT_DOC_MAP_BREAKS))); - } - - public void testShardSegmentDocMapBreaksMax() { - testShardSegmentDocMapBreaks(ByteSizeValue.ofBytes(MAX_SHARD_SEGMENT_DOC_MAP_BREAKS)); + ByteSizeValue buildBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory blockFactory = blockFactory(limit); + buildDocBlock(blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + }); + ByteSizeValue docMapBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory blockFactory = blockFactory(limit); + try (DocBlock docBlock = buildDocBlock(blockFactory)) { + docBlock.asVector().shardSegmentDocMapForwards(); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + }); + var limit = ByteSizeValue.ofBytes(randomLongBetween(buildBreakLimit.getBytes() + 1, docMapBreakLimit.getBytes())); + BlockFactory blockFactory = blockFactory(limit); + testShardSegmentDocMapBreaks(blockFactory); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } - private void testShardSegmentDocMapBreaks(ByteSizeValue limit) { + private void testShardSegmentDocMapBreaks(BlockFactory blockFactory) { int size = 100; - BlockFactory blockFactory = blockFactory(limit); try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, size)) { for (int r = 0; r < size; r++) { - builder.appendShard(3 - size % 4); - builder.appendSegment(size % 10); + builder.appendShard(3 - r % 4); + builder.appendSegment(r % 10); builder.appendDoc(size); } try (DocBlock docBlock = builder.build()) { @@ -255,15 +260,36 @@ public void testFilter() { } public void testFilterBreaks() { - BlockFactory factory = blockFactory(ByteSizeValue.ofBytes(between(250, 370))); - try ( - DocVector docs = new DocVector( - factory.newConstantIntVector(0, 10), - factory.newConstantIntVector(0, 10), - factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10), - false - ) - ) { + Function buildDocVector = factory -> { + IntVector shards = null; + IntVector segments = null; + IntVector docs = null; + DocVector result = null; + try { + shards = factory.newConstantIntVector(0, 10); + segments = factory.newConstantIntVector(0, 10); + docs = factory.newConstantIntVector(0, 10); + result = new DocVector(shards, segments, docs, false); + return result; + } finally { + if (result == null) { + Releasables.close(shards, segments, docs); + } + } + }; + ByteSizeValue buildBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory factory = blockFactory(limit); + buildDocVector.apply(factory).close(); + }); + ByteSizeValue filterBreakLimit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofMb(128), limit -> { + BlockFactory factory = blockFactory(limit); + try (DocVector docs = buildDocVector.apply(factory)) { + docs.filter(1, 2, 3).close(); + } + }); + ByteSizeValue limit = ByteSizeValue.ofBytes(randomLongBetween(buildBreakLimit.getBytes() + 1, filterBreakLimit.getBytes())); + BlockFactory factory = blockFactory(limit); + try (DocVector docs = buildDocVector.apply(factory)) { Exception e = expectThrows(CircuitBreakingException.class, () -> docs.filter(1, 2, 3)); assertThat(e.getMessage(), equalTo("over test limit")); } From 
d5ae347474b914c380afb07788c8689abfb0e85a Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Fri, 12 Jan 2024 11:40:55 -0600
Subject: [PATCH 24/35] Re-enabling MultiNodesStatsTests.testMultipleNodes()
 (#104329)

---
 .../org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java
index 44859b73ffb2e..c8aae302e357b 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java
@@ -41,7 +41,6 @@ public void cleanup() throws Exception {
         wipeMonitoringIndices();
     }

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/96374")
     public void testMultipleNodes() throws Exception {
         int nodes = 0;

From 50ac28012d39d3f444e653f93cf4d05fbcff3533 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 12 Jan 2024 12:48:29 -0500
Subject: [PATCH 25/35] ESQL: Run async tests more carefully (#104330)

The ESQL async tests run the ESQL yaml tests two extra times - once
under the async endpoint with the `wait_for_completion_timeout` set to a
long time and *again* with `wait_for_completion_timeout` set to a short
time, expecting to receive an `id` for the query.

That second way is tricky! Even with a `0ms` timeout sometimes the
request will complete. That's great, but the tests didn't realize that
was possible. And it's tricky to get the warnings and `catch` sections
working properly with that.

This reworks how we run these commands, breaking what was a single API
call apart into two calls and taking into account that the "start the
query" request could also complete the query.
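In rough outline, the reworked flow looks like this (a condensed sketch
of the DoEsqlAsync logic in the diff below, with variable names
shortened; an illustration, not a verbatim excerpt):

    // start the query; a short timeout makes it likely, but not
    // certain, that we get back an id instead of a finished result
    ClientYamlTestResponse start = executionContext.callApi(
        "esql.async_query", params, bodies, headers, nodeSelector);
    String id = (String) start.evaluate("id");
    if (id == null) {
        // finished before the timeout: check catch/warnings right here
        original.failIfHasCatch(start);
        original.checkWarningHeaders(start.getWarningHeaders(), testPath);
    } else {
        // still running: fetch with a long timeout and check there instead
        ClientYamlTestResponse fetch = executionContext.callApi(
            "esql.async_query_get",
            Map.of("wait_for_completion_timeout", "30m", "id", id),
            List.of(), headers, nodeSelector);
        original.failIfHasCatch(fetch);
        original.checkWarningHeaders(fetch.getWarningHeaders(), testPath);
    }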
Closes #104294 --- .../rest/yaml/section/ApiCallSection.java | 8 +- .../test/rest/yaml/section/DoSection.java | 76 ++++++------ .../qa/single_node/EsqlClientYamlAsyncIT.java | 16 +-- .../EsqlClientYamlAsyncSubmitAndFetchIT.java | 112 +++++++++++++----- 4 files changed, 133 insertions(+), 79 deletions(-) diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java index 1708c5977486d..58c1e3b82e336 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ApiCallSection.java @@ -32,6 +32,10 @@ public ApiCallSection(String api) { this.api = api; } + public String getApi() { + return api; + } + public ApiCallSection copyWithNewApi(String api) { ApiCallSection copy = new ApiCallSection(api); for (var e : params.entrySet()) { @@ -45,10 +49,6 @@ public ApiCallSection copyWithNewApi(String api) { return copy; } - public String getApi() { - return api; - } - public Map getParams() { // make sure we never modify the parameters once returned return unmodifiableMap(params); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index a23a433f812c2..00b92eac40d7f 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -343,7 +343,6 @@ public XContentLocation getLocation() { @Override public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { - if ("param".equals(catchParam)) { // client should throw validation error before sending request // lets just return without doing anything as we don't have any client to test here @@ -359,17 +358,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx apiCallSection.getHeaders(), apiCallSection.getNodeSelector() ); - if (Strings.hasLength(catchParam)) { - String catchStatusCode; - if (CATCHES.containsKey(catchParam)) { - catchStatusCode = CATCHES.get(catchParam).v1(); - } else if (catchParam.startsWith("/") && catchParam.endsWith("/")) { - catchStatusCode = "4xx|5xx"; - } else { - throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); - } - fail(formatStatusCodeMessage(response, catchStatusCode)); - } + failIfHasCatch(response); final String testPath = executionContext.getClientYamlTestCandidate() != null ? 
executionContext.getClientYamlTestCandidate().getTestPath() : null; @@ -393,27 +382,23 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx } checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { - ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); - if (Strings.hasLength(catchParam) == false) { - fail(formatStatusCodeMessage(restTestResponse, "2xx")); - } else if (CATCHES.containsKey(catchParam)) { - assertStatusCode(restTestResponse); - } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) { - // the text of the error message matches regular expression - assertThat( - formatStatusCodeMessage(restTestResponse, "4xx|5xx"), - e.getResponseException().getResponse().getStatusLine().getStatusCode(), - greaterThanOrEqualTo(400) - ); - Object error = executionContext.response("error"); - assertThat("error was expected in the response", error, notNullValue()); - // remove delimiters from regex - String regex = catchParam.substring(1, catchParam.length() - 1); - assertThat("the error message was expected to match the provided regex but didn't", error.toString(), matches(regex)); - } else { - throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); - } + checkResponseException(e, executionContext); + } + } + + public void failIfHasCatch(ClientYamlTestResponse response) { + if (Strings.hasLength(catchParam) == false) { + return; + } + String catchStatusCode; + if (CATCHES.containsKey(catchParam)) { + catchStatusCode = CATCHES.get(catchParam).v1(); + } else if (catchParam.startsWith("/") && catchParam.endsWith("/")) { + catchStatusCode = "4xx|5xx"; + } else { + throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); } + fail(formatStatusCodeMessage(response, catchStatusCode)); } void checkElasticProductHeader(final List productHeaders) { @@ -448,7 +433,7 @@ void checkWarningHeaders(final List warningHeaders) { /** * Check that the response contains only the warning headers that we expect. 
*/ - void checkWarningHeaders(final List warningHeaders, String testPath) { + public void checkWarningHeaders(final List warningHeaders, String testPath) { final List unexpected = new ArrayList<>(); final List unmatched = new ArrayList<>(); final List missing = new ArrayList<>(); @@ -536,6 +521,31 @@ void checkWarningHeaders(final List warningHeaders, String testPath) { } } + public void checkResponseException(ClientYamlTestResponseException e, ClientYamlTestExecutionContext executionContext) + throws IOException { + + ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); + if (Strings.hasLength(catchParam) == false) { + fail(formatStatusCodeMessage(restTestResponse, "2xx")); + } else if (CATCHES.containsKey(catchParam)) { + assertStatusCode(restTestResponse); + } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) { + // the text of the error message matches regular expression + assertThat( + formatStatusCodeMessage(restTestResponse, "4xx|5xx"), + e.getResponseException().getResponse().getStatusLine().getStatusCode(), + greaterThanOrEqualTo(400) + ); + Object error = executionContext.response("error"); + assertThat("error was expected in the response", error, notNullValue()); + // remove delimiters from regex + String regex = catchParam.substring(1, catchParam.length() - 1); + assertThat("the error message was expected to match the provided regex but didn't", error.toString(), matches(regex)); + } else { + throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported"); + } + } + private static void appendBadHeaders(final StringBuilder sb, final List headers, final String message) { if (headers.isEmpty() == false) { sb.append(message).append(" [\n"); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java index a38e34d7842d8..c2fa41a5241db 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Map; import java.util.function.Function; -import java.util.stream.Stream; /** * Run the ESQL yaml tests against the async esql endpoint with a 30 minute {@code wait_until_completion_timeout}. 
@@ -40,11 +39,11 @@ public static Iterable parameters() throws Exception { body.put("wait_for_completion_timeout", "30m"); } doSection.setApiCallSection(copy); - return Stream.of(doSection); + return doSection; }); } - public static Iterable parameters(Function> modify) throws Exception { + public static Iterable parameters(Function modify) throws Exception { List result = new ArrayList<>(); for (Object[] orig : ESClientYamlSuiteTestCase.createParameters()) { assert orig.length == 1; @@ -54,7 +53,7 @@ public static Iterable parameters(Function modifyExecutableSection(e, modify)).toList() + candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() ); result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); } catch (IllegalArgumentException e) { @@ -64,12 +63,9 @@ public static Iterable parameters(Function modifyExecutableSection( - ExecutableSection e, - Function> modify - ) { + private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { if (false == (e instanceof DoSection)) { - return Stream.of(e); + return e; } DoSection doSection = (DoSection) e; String api = doSection.getApiCallSection().getApi(); @@ -78,7 +74,7 @@ private static Stream modifyExecutableSection( case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." ); - default -> Stream.of(e); + default -> e; }; } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index be2bfcb8a2787..34eb2421b0432 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -9,19 +9,22 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; +import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; +import org.elasticsearch.xcontent.XContentLocation; +import java.io.IOException; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Stream; /** * Run the ESQL yaml tests async and then fetch the results with a long wait time. 
*/ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104294") public class EsqlClientYamlAsyncSubmitAndFetchIT extends AbstractEsqlClientYamlIT { public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); @@ -29,34 +32,79 @@ public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCan @ParametersFactory public static Iterable parameters() throws Exception { - return EsqlClientYamlAsyncIT.parameters(doSection -> { - ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query"); - for (Map body : copy.getBodies()) { - body.put("wait_for_completion_timeout", "0ms"); - body.put("keep_on_completion", true); + return EsqlClientYamlAsyncIT.parameters(DoEsqlAsync::new); + } + + private static class DoEsqlAsync implements ExecutableSection { + private final DoSection original; + + private DoEsqlAsync(DoSection original) { + this.original = original; + } + + @Override + public XContentLocation getLocation() { + return original.getLocation(); + } + + @Override + public void execute(ClientYamlTestExecutionContext executionContext) throws IOException { + try { + // Start the query + List> bodies = original.getApiCallSection().getBodies().stream().map(m -> { + Map body = new HashMap<>(m); + if (randomBoolean()) { + /* + * Try to force the request to go async by setting the timeout to 0. + * This doesn't *actually* force the request async - if it finishes + * super duper faster it won't get async. But that's life. + */ + body.put("wait_for_completion_timeout", "0ms"); + } + return body; + }).toList(); + ClientYamlTestResponse startResponse = executionContext.callApi( + "esql.async_query", + original.getApiCallSection().getParams(), + bodies, + original.getApiCallSection().getHeaders(), + original.getApiCallSection().getNodeSelector() + ); + + String id = (String) startResponse.evaluate("id"); + boolean finishedEarly = id == null; + if (finishedEarly) { + /* + * If we finished early, make sure we don't have a "catch" + * param and expect and error. And make sure we match the + * warnings folks have asked for. + */ + original.failIfHasCatch(startResponse); + original.checkWarningHeaders(startResponse.getWarningHeaders(), testPath(executionContext)); + return; + } + + /* + * Ok, we didn't finish before the timeout. Fine, let's fetch the result. + */ + ClientYamlTestResponse fetchResponse = executionContext.callApi( + "esql.async_query_get", + Map.of("wait_for_completion_timeout", "30m", "id", id), + List.of(), + original.getApiCallSection().getHeaders(), + original.getApiCallSection().getNodeSelector() + ); + original.failIfHasCatch(fetchResponse); + original.checkWarningHeaders(fetchResponse.getWarningHeaders(), testPath(executionContext)); + } catch (ClientYamlTestResponseException e) { + original.checkResponseException(e, executionContext); } - doSection.setApiCallSection(copy); - - DoSection fetch = new DoSection(doSection.getLocation()); - fetch.setApiCallSection(new ApiCallSection("esql.async_query_get")); - fetch.getApiCallSection().addParam("wait_for_completion_timeout", "30m"); - fetch.getApiCallSection().addParam("id", "$body.id"); - - /* - * The request to start the query doesn't make warnings or errors so shift - * those to the fetch. 
- */ - fetch.setExpectedWarningHeaders(doSection.getExpectedWarningHeaders()); - fetch.setExpectedWarningHeadersRegex(doSection.getExpectedWarningHeadersRegex()); - fetch.setAllowedWarningHeaders(doSection.getAllowedWarningHeaders()); - fetch.setAllowedWarningHeadersRegex(doSection.getAllowedWarningHeadersRegex()); - fetch.setCatch(doSection.getCatch()); - doSection.setExpectedWarningHeaders(List.of()); - doSection.setExpectedWarningHeadersRegex(List.of()); - doSection.setAllowedWarningHeaders(List.of()); - doSection.setAllowedWarningHeadersRegex(List.of()); - doSection.setCatch(null); - return Stream.of(doSection, fetch); - }); + } + + private String testPath(ClientYamlTestExecutionContext executionContext) { + return executionContext.getClientYamlTestCandidate() != null + ? executionContext.getClientYamlTestCandidate().getTestPath() + : null; + } } } From 2a79d781eb13da6d132ca4c15f1edf6a38e21a93 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Fri, 12 Jan 2024 12:55:40 -0500 Subject: [PATCH 26/35] Data streams fix failure store delete (#104281) This PR adds the any failure store indices to the list of indices to be deleted when deleting a data stream. --- docs/changelog/104281.yaml | 5 ++ .../DeleteDataStreamTransportAction.java | 1 + .../DeleteDataStreamTransportActionTests.java | 25 ++++++ .../GetDataStreamsTransportActionTests.java | 1 + .../test/data_stream/10_basic.yml | 79 +++++++++++++++++++ .../metadata/DataStreamTestHelper.java | 55 +++++++++++-- 6 files changed, 161 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/104281.yaml diff --git a/docs/changelog/104281.yaml b/docs/changelog/104281.yaml new file mode 100644 index 0000000000000..087e91d83ab3b --- /dev/null +++ b/docs/changelog/104281.yaml @@ -0,0 +1,5 @@ +pr: 104281 +summary: Data streams fix failure store delete +area: Data streams +type: bug +issues: [] diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index e756ba32ec699..6e7528c470d49 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -155,6 +155,7 @@ static ClusterState removeDataStream( DataStream dataStream = currentState.metadata().dataStreams().get(dataStreamName); assert dataStream != null; backingIndicesToRemove.addAll(dataStream.getIndices()); + backingIndicesToRemove.addAll(dataStream.getFailureIndices()); } // first delete the data streams and then the indices: diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java index 29c88b7f75463..a5c3b348b1f1b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportActionTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; +import org.junit.Assume; import java.util.Collections; import java.util.List; @@ -55,6 +56,30 @@ public void testDeleteDataStream() { } 
} + public void testDeleteDataStreamWithFailureStore() { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + final String dataStreamName = "my-data-stream"; + final List otherIndices = randomSubsetOf(List.of("foo", "bar", "baz")); + + ClusterState cs = DataStreamTestHelper.getClusterStateWithDataStreams( + List.of(new Tuple<>(dataStreamName, 2)), + otherIndices, + System.currentTimeMillis(), + Settings.EMPTY, + 1, + false, + true + ); + DeleteDataStreamAction.Request req = new DeleteDataStreamAction.Request(new String[] { dataStreamName }); + ClusterState newState = DeleteDataStreamTransportAction.removeDataStream(iner, cs, req, validator, Settings.EMPTY); + assertThat(newState.metadata().dataStreams().size(), equalTo(0)); + assertThat(newState.metadata().indices().size(), equalTo(otherIndices.size())); + for (String indexName : otherIndices) { + assertThat(newState.metadata().indices().get(indexName).getIndex().getName(), equalTo(indexName)); + } + } + public void testDeleteMultipleDataStreams() { String[] dataStreamNames = { "foo", "bar", "baz", "eggplant" }; ClusterState cs = DataStreamTestHelper.getClusterStateWithDataStreams( diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java index c24d386dcb26e..637fb44affb6f 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java @@ -215,6 +215,7 @@ public void testGetTimeSeriesMixedDataStream() { instant.toEpochMilli(), Settings.EMPTY, 0, + false, false ); DataStreamTestHelper.getClusterStateWithDataStream(mBuilder, dataStream1, List.of(new Tuple<>(twoHoursAgo, twoHoursAhead))); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 6496930764ab8..f5837f6d8c286 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -303,6 +303,11 @@ setup: name: failure-data-stream2 - is_true: acknowledged + - do: + indices.delete_index_template: + name: my-template4 + - is_true: acknowledged + --- "Create data stream with invalid name": - skip: @@ -530,6 +535,80 @@ setup: indices.get: index: $idx0name +--- +"Delete data stream with failure stores": + - skip: + version: " - 8.11.99" + reason: "data streams only supported in 8.12+" + + - do: + allowed_warnings: + - "index template [my-template4] has index patterns [failure-data-stream1, failure-data-stream2] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] will take precedence during new index creation" + indices.put_index_template: + name: my-template4 + body: + index_patterns: [ failure-data-stream1 ] + data_stream: + failure_store: true + + - do: + indices.create_data_stream: + name: failure-data-stream1 + - is_true: acknowledged + + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + + # save the backing index names for later use + - do: + indices.get_data_stream: + name: failure-data-stream1 + + - set: { 
data_streams.0.indices.0.index_name: idx0name } + - set: { data_streams.0.failure_indices.0.index_name: fs0name } + + - do: + indices.get: + index: ['.ds-failure-data-stream1-*000001', 'test_index'] + + - is_true: test_index.settings + - is_true: .$idx0name.settings + + - do: + indices.get_data_stream: {} + - match: { data_streams.0.name: failure-data-stream1 } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + indices.delete_data_stream: + name: failure-data-stream1 + - is_true: acknowledged + + - do: + catch: missing + indices.get: + index: $idx0name + + - do: + catch: missing + indices.get: + index: $fs0name + + - do: + indices.delete_index_template: + name: my-template4 + - is_true: acknowledged + --- "Delete data stream missing behaviour": - skip: diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 5c5123e03454f..d0b30bff92f3e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -68,6 +68,7 @@ import static org.elasticsearch.cluster.metadata.DataStream.BACKING_INDEX_PREFIX; import static org.elasticsearch.cluster.metadata.DataStream.DATE_FORMATTER; import static org.elasticsearch.cluster.metadata.DataStream.getDefaultBackingIndexName; +import static org.elasticsearch.cluster.metadata.DataStream.getDefaultFailureStoreName; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID; import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; @@ -111,7 +112,19 @@ public static DataStream newInstance( boolean replicated, @Nullable DataStreamLifecycle lifecycle ) { - return new DataStream(name, indices, generation, metadata, false, replicated, false, false, null, lifecycle, false, List.of()); + return newInstance(name, indices, generation, metadata, replicated, lifecycle, List.of()); + } + + public static DataStream newInstance( + String name, + List indices, + long generation, + Map metadata, + boolean replicated, + @Nullable DataStreamLifecycle lifecycle, + List failureStores + ) { + return new DataStream(name, indices, generation, metadata, false, replicated, false, false, null, lifecycle, false, failureStores); } public static String getLegacyDefaultBackingIndexName( @@ -318,9 +331,21 @@ public static ClusterState getClusterStateWithDataStreams( Settings settings, int replicas, boolean replicated + ) { + return getClusterStateWithDataStreams(dataStreams, indexNames, currentTime, settings, replicas, replicated, false); + } + + public static ClusterState getClusterStateWithDataStreams( + List> dataStreams, + List indexNames, + long currentTime, + Settings settings, + int replicas, + boolean replicated, + boolean storeFailures ) { Metadata.Builder builder = Metadata.builder(); - getClusterStateWithDataStreams(builder, dataStreams, indexNames, currentTime, settings, replicas, replicated); + 
getClusterStateWithDataStreams(builder, dataStreams, indexNames, currentTime, settings, replicas, replicated, storeFailures);
         return ClusterState.builder(new ClusterName("_name")).metadata(builder).build();
     }

@@ -331,13 +356,16 @@ public static void getClusterStateWithDataStreams(
         long currentTime,
         Settings settings,
         int replicas,
-        boolean replicated
+        boolean replicated,
+        boolean storeFailures
     ) {
         builder.put(
             "template_1",
             ComposableIndexTemplate.builder()
                 .indexPatterns(List.of("*"))
-                .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
+                .dataStreamTemplate(
+                    new ComposableIndexTemplate.DataStreamTemplate(false, false, DataStream.isFailureStoreEnabled() && storeFailures)
+                )
                 .build()
         );

@@ -351,12 +379,29 @@
             }
             allIndices.addAll(backingIndices);

+            List failureStores = new ArrayList<>();
+            if (DataStream.isFailureStoreEnabled() && storeFailures) {
+                for (int failureStoreNumber = 1; failureStoreNumber <= dsTuple.v2(); failureStoreNumber++) {
+                    failureStores.add(
+                        createIndexMetadata(
+                            getDefaultFailureStoreName(dsTuple.v1(), failureStoreNumber, currentTime),
+                            true,
+                            settings,
+                            replicas
+                        )
+                    );
+                }
+                allIndices.addAll(failureStores);
+            }
+
             DataStream ds = DataStreamTestHelper.newInstance(
                 dsTuple.v1(),
                 backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()),
                 dsTuple.v2(),
                 null,
-                replicated
+                replicated,
+                null,
+                failureStores.stream().map(IndexMetadata::getIndex).collect(Collectors.toList())
             );
             builder.put(ds);
         }

From a84ce721f6a9e66b111c58b7ac6169b5aef6ab4d Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 12 Jan 2024 13:03:12 -0500
Subject: [PATCH 27/35] ESQL: Fix bug in topn tests (#104210)

This fixes a bug in the topn tests which failed because we tried to
sort on a geo field. Geo fields aren't valid sort keys but the test is
randomized and rarely picks them. This stops it from picking them.

Most of the text of this change is actually just me making debugging
easier.
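The shape of the test fix, sketched (the `isValidSortKey` predicate is
hypothetical and stands in for whatever check the test actually uses;
`randomValueOtherThanMany` and `randomFrom` are the usual ESTestCase
helpers):

    // Hypothetical sketch: re-roll the randomized sort column's element
    // type until it is a valid sort key, so the test can no longer pick
    // a geo-encoded column to sort on.
    ElementType type = randomValueOtherThanMany(
        t -> isValidSortKey(t) == false,
        () -> randomFrom(ElementType.values())
    );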
Closes #104167 --- .../operator/topn/KeyExtractorForBoolean.java | 41 ++++++++++------- .../topn/KeyExtractorForBytesRef.java | 41 ++++++++++------- .../operator/topn/KeyExtractorForDouble.java | 41 ++++++++++------- .../operator/topn/KeyExtractorForInt.java | 41 ++++++++++------- .../operator/topn/KeyExtractorForLong.java | 41 ++++++++++------- .../topn/ResultBuilderForBoolean.java | 4 ++ .../topn/ResultBuilderForBytesRef.java | 4 ++ .../operator/topn/ResultBuilderForDouble.java | 4 ++ .../operator/topn/ResultBuilderForInt.java | 4 ++ .../operator/topn/ResultBuilderForLong.java | 4 ++ .../topn/ValueExtractorForBoolean.java | 4 ++ .../topn/ValueExtractorForBytesRef.java | 4 ++ .../topn/ValueExtractorForDouble.java | 4 ++ .../operator/topn/ValueExtractorForInt.java | 4 ++ .../operator/topn/ValueExtractorForLong.java | 4 ++ .../operator/topn/X-KeyExtractor.java.st | 45 ++++++++++++------- .../operator/topn/X-ResultBuilder.java.st | 4 ++ .../operator/topn/X-ValueExtractor.java.st | 4 ++ .../operator/topn/TopNOperatorTests.java | 30 ++++++++----- 19 files changed, 226 insertions(+), 102 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java index 40fe7ffdde661..b537b6d96fc9d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link BooleanBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForBoolean implements KeyExtractor { static KeyExtractorForBoolean extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BooleanBlock block) { BooleanVector v = block.asVector(); if (v != null) { - return new KeyExtractorForBoolean.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForBoolean.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForBoolean.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBoolean.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBoolean.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForBoolean.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBoolean.MaxForUnordered(encoder, nul, nonNul, block); + ? 
new KeyExtractorForBoolean.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBoolean.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForBoolean { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForBoolean%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForBoolean { private final BooleanVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, BooleanVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, BooleanVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForBoolean { + static class MinFromAscendingBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForBoolean { + static class MaxFromAscendingBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForBoolean { + static class MinFromUnorderedBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -120,10 +131,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForBoolean { + static class MaxFromUnorderedBlock extends KeyExtractorForBoolean { private final BooleanBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java index 2f546a46aaeaf..bf07905019dad 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBytesRef.java @@ -12,20 +12,26 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link BytesRefBlock}s. + * This class is generated. 
Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForBytesRef implements KeyExtractor { static KeyExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, BytesRefBlock block) { BytesRefVector v = block.asVector(); if (v != null) { - return new KeyExtractorForBytesRef.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForBytesRef.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForBytesRef.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBytesRef.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBytesRef.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForBytesRef.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForBytesRef.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForBytesRef.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForBytesRef.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final TopNEncoder encoder; @@ -49,10 +55,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForBytesRef { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForBytesRef%s(%s, %s, %s)", getClass().getSimpleName(), encoder, nul, nonNul); + } + + static class FromVector extends KeyExtractorForBytesRef { private final BytesRefVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, BytesRefVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, BytesRefVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -63,10 +74,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForBytesRef { + static class MinFromAscendingBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -80,10 +91,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForBytesRef { + static class MaxFromAscendingBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -97,12 +108,12 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForBytesRef { + static class MinFromUnorderedBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; private final BytesRef minScratch = new BytesRef(); - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -128,12 +139,12 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForBytesRef { + static class 
MaxFromUnorderedBlock extends KeyExtractorForBytesRef { private final BytesRefBlock block; private final BytesRef maxScratch = new BytesRef(); - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BytesRefBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java index 5e821b9e24db5..03477a65a3cde 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForDouble.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link DoubleBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForDouble implements KeyExtractor { static KeyExtractorForDouble extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, DoubleBlock block) { DoubleVector v = block.asVector(); if (v != null) { - return new KeyExtractorForDouble.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForDouble.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForDouble.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForDouble.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForDouble.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForDouble.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForDouble.MaxForUnordered(encoder, nul, nonNul, block); + ? 
new KeyExtractorForDouble.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForDouble.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForDouble { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForDouble%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForDouble { private final DoubleVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, DoubleVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, DoubleVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForDouble { + static class MinFromAscendingBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForDouble { + static class MaxFromAscendingBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForDouble { + static class MinFromUnorderedBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForDouble { + static class MaxFromUnorderedBlock extends KeyExtractorForDouble { private final DoubleBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, DoubleBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java index d4269a622f098..5f45df662efdd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link IntBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. 
+ */ abstract class KeyExtractorForInt implements KeyExtractor { static KeyExtractorForInt extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, IntBlock block) { IntVector v = block.asVector(); if (v != null) { - return new KeyExtractorForInt.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForInt.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForInt.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForInt.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForInt.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForInt.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForInt.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForInt.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForInt.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForInt { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForInt%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForInt { private final IntVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, IntVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, IntVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForInt { + static class MinFromAscendingBlock extends KeyExtractorForInt { private final IntBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForInt { + static class MaxFromAscendingBlock extends KeyExtractorForInt { private final IntBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForUnordered extends KeyExtractorForInt { + static class MinFromUnorderedBlock extends KeyExtractorForInt { private final IntBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForInt { + static class MaxFromUnorderedBlock extends KeyExtractorForInt { private final IntBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java index 6a200efff529d..e61ab644ecfe1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForLong.java @@ -11,20 +11,26 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link LongBlock}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorForLong implements KeyExtractor { static KeyExtractorForLong extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, LongBlock block) { LongVector v = block.asVector(); if (v != null) { - return new KeyExtractorForLong.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorForLong.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorForLong.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForLong.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForLong.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorForLong.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorForLong.MaxForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorForLong.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorForLong.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } private final byte nul; @@ -47,10 +53,15 @@ protected final int nul(BreakingBytesRefBuilder key) { return 1; } - static class ForVector extends KeyExtractorForLong { + @Override + public final String toString() { + return String.format(Locale.ROOT, "KeyExtractorForLong%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); + } + + static class FromVector extends KeyExtractorForLong { private final LongVector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, LongVector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, LongVector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -61,10 +72,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MinForAscending extends KeyExtractorForLong { + static class MinFromAscendingBlock extends KeyExtractorForLong { private final LongBlock block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -78,10 +89,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForAscending extends KeyExtractorForLong { + static class MaxFromAscendingBlock extends KeyExtractorForLong { private final LongBlock block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -95,10 +106,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } 
- static class MinForUnordered extends KeyExtractorForLong { + static class MinFromUnorderedBlock extends KeyExtractorForLong { private final LongBlock block; - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } @@ -119,10 +130,10 @@ public int writeKey(BreakingBytesRefBuilder key, int position) { } } - static class MaxForUnordered extends KeyExtractorForLong { + static class MaxFromUnorderedBlock extends KeyExtractorForLong { private final LongBlock block; - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, LongBlock block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 184ef69f00d85..e6b8d70a63ed7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; +/** + * Builds the resulting {@link BooleanBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForBoolean implements ResultBuilder { private final BooleanBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 4008f7fbd924b..637cddb9b3089 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; +/** + * Builds the resulting {@link BytesRefBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForBytesRef implements ResultBuilder { private final BytesRefBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index f06a1e814ef43..e7119ee714c34 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; +/** + * Builds the resulting {@link DoubleBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. 
+ */ class ResultBuilderForDouble implements ResultBuilder { private final DoubleBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 848bbf9ab6a0a..ad1236975141b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; +/** + * Builds the resulting {@link IntBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForInt implements ResultBuilder { private final IntBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index b4361ad83180a..cad392c3d525c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; +/** + * Builds the resulting {@link LongBlock} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. + */ class ResultBuilderForLong implements ResultBuilder { private final LongBlock.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java index b13dd3ce7f2b0..535618da01727 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBoolean.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link BooleanBlock}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead.
+ */ abstract class ValueExtractorForBoolean implements ValueExtractor { static ValueExtractorForBoolean extractorFor(TopNEncoder encoder, boolean inKey, BooleanBlock block) { BooleanVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java index 65c5da5737a59..70065fd544759 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForBytesRef.java @@ -12,6 +12,10 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link BytesRefBlock}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead. + */ abstract class ValueExtractorForBytesRef implements ValueExtractor { static ValueExtractorForBytesRef extractorFor(TopNEncoder encoder, boolean inKey, BytesRefBlock block) { BytesRefVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java index d20f2bf53972a..b504196dff7e1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForDouble.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link DoubleBlock}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead. + */ abstract class ValueExtractorForDouble implements ValueExtractor { static ValueExtractorForDouble extractorFor(TopNEncoder encoder, boolean inKey, DoubleBlock block) { DoubleVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java index d20368f874e8e..485d9f4bb8559 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link IntBlock}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead.
+ */ abstract class ValueExtractorForInt implements ValueExtractor { static ValueExtractorForInt extractorFor(TopNEncoder encoder, boolean inKey, IntBlock block) { IntVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java index b7b566b3eda3d..4a244746bd0d3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForLong.java @@ -11,6 +11,10 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link LongBlock}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead. + */ abstract class ValueExtractorForLong implements ValueExtractor { static ValueExtractorForLong extractorFor(TopNEncoder encoder, boolean inKey, LongBlock block) { LongVector vector = block.asVector(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st index dbe0b23af93bb..90a4044a10a93 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st @@ -14,20 +14,26 @@ import org.elasticsearch.compute.data.$Type$Block; import org.elasticsearch.compute.data.$Type$Vector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import java.util.Locale; + +/** + * Extracts sort keys for top-n from their {@link $Type$Block}s. + * This class is generated. Edit {@code X-KeyExtractor.java.st} instead. + */ abstract class KeyExtractorFor$Type$ implements KeyExtractor { static KeyExtractorFor$Type$ extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, $Type$Block block) { $Type$Vector v = block.asVector(); if (v != null) { - return new KeyExtractorFor$Type$.ForVector(encoder, nul, nonNul, v); + return new KeyExtractorFor$Type$.FromVector(encoder, nul, nonNul, v); } if (ascending) { return block.mvSortedAscending() - ? new KeyExtractorFor$Type$.MinForAscending(encoder, nul, nonNul, block) - : new KeyExtractorFor$Type$.MinForUnordered(encoder, nul, nonNul, block); + ? new KeyExtractorFor$Type$.MinFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorFor$Type$.MinFromUnorderedBlock(encoder, nul, nonNul, block); } return block.mvSortedAscending() - ? new KeyExtractorFor$Type$.MaxForAscending(encoder, nul, nonNul, block) - : new KeyExtractorFor$Type$.MaxForUnordered(encoder, nul, nonNul, block); + ?
new KeyExtractorFor$Type$.MaxFromAscendingBlock(encoder, nul, nonNul, block) + : new KeyExtractorFor$Type$.MaxFromUnorderedBlock(encoder, nul, nonNul, block); } $if(BytesRef)$ @@ -65,10 +71,19 @@ $endif$ return 1; } - static class ForVector extends KeyExtractorFor$Type$ { + @Override + public final String toString() { +$if(BytesRef)$ + return String.format(Locale.ROOT, "KeyExtractorFor$Type$%s(%s, %s, %s)", getClass().getSimpleName(), encoder, nul, nonNul); +$else$ + return String.format(Locale.ROOT, "KeyExtractorFor$Type$%s(%s, %s)", getClass().getSimpleName(), nul, nonNul); +$endif$ + } + + static class FromVector extends KeyExtractorFor$Type$ { private final $Type$Vector vector; - ForVector(TopNEncoder encoder, byte nul, byte nonNul, $Type$Vector vector) { + FromVector(TopNEncoder encoder, byte nul, byte nonNul, $Type$Vector vector) { super(encoder, nul, nonNul); this.vector = vector; } @@ -83,10 +98,10 @@ $endif$ } } - static class MinForAscending extends KeyExtractorFor$Type$ { + static class MinFromAscendingBlock extends KeyExtractorFor$Type$ { private final $Type$Block block; - MinForAscending(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + MinFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { super(encoder, nul, nonNul); this.block = block; } @@ -104,10 +119,10 @@ $endif$ } } - static class MaxForAscending extends KeyExtractorFor$Type$ { + static class MaxFromAscendingBlock extends KeyExtractorFor$Type$ { private final $Type$Block block; - MaxForAscending(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { super(encoder, nul, nonNul); this.block = block; } @@ -125,14 +140,14 @@ $endif$ } } - static class MinForUnordered extends KeyExtractorFor$Type$ { + static class MinFromUnorderedBlock extends KeyExtractorFor$Type$ { private final $Type$Block block; $if(BytesRef)$ private final BytesRef minScratch = new BytesRef(); $endif$ - MinForUnordered(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { super(encoder, nul, nonNul); this.block = block; } @@ -173,14 +188,14 @@ $endif$ } } - static class MaxForUnordered extends KeyExtractorFor$Type$ { + static class MaxFromUnorderedBlock extends KeyExtractorFor$Type$ { private final $Type$Block block; $if(BytesRef)$ private final BytesRef maxScratch = new BytesRef(); $endif$ - MaxForUnordered(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { + MaxFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, $Type$Block block) { super(encoder, nul, nonNul); this.block = block; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st index 49bece755820f..4858dba3b4de7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st @@ -11,6 +11,10 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.$Type$Block; +/** + * Builds the resulting {@link $Type$Block} for some column in a top-n. + * This class is generated. Edit {@code X-ResultBuilder.java.st} instead. 
+ */ class ResultBuilderFor$Type$ implements ResultBuilder { private final $Type$Block.Builder builder; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st index 0e25e44834c17..ef80df5c334f2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ValueExtractor.java.st @@ -14,6 +14,10 @@ import org.elasticsearch.compute.data.$Type$Block; import org.elasticsearch.compute.data.$Type$Vector; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +/** + * Extracts non-sort-key values for top-n from their {@link $Type$Block}s. + * This class is generated. Edit {@code X-ValueExtractor.java.st} instead. + */ abstract class ValueExtractorFor$Type$ implements ValueExtractor { static ValueExtractorFor$Type$ extractorFor(TopNEncoder encoder, boolean inKey, $Type$Block block) { $Type$Vector vector = block.asVector();
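Together with the renames above (ForVector to FromVector, MinForAscending to MinFromAscendingBlock, and so on), the new toString() makes each extractor self-describing in debug output. A minimal illustration, assuming a TopNEncoder and a vector-backed BooleanBlock are in scope (the classes are package-private, so this would have to live inside the topn package):

    // getClass().getSimpleName() supplies the subclass name, so a vector-backed
    // extractor built with nul=1 and nonNul=0 renders as
    // "KeyExtractorForBooleanFromVector(1, 0)".
    KeyExtractorForBoolean extractor = KeyExtractorForBoolean.extractorFor(
        encoder,   // TopNEncoder, assumed in scope
        true,      // ascending
        (byte) 1,  // nul
        (byte) 0,  // nonNul
        block      // BooleanBlock whose asVector() returns non-null, assumed in scope
    );
    assert extractor.toString().equals("KeyExtractorForBooleanFromVector(1, 0)");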
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 22b17190c0355..10fecd122672a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -936,7 +936,6 @@ private void assertSortingOnMV( assertMap(actualValues, matchesList(List.of(expectedValues.subList(0, topCount)))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104167") public void testRandomMultiValuesTopN() { DriverContext driverContext = driverContext(); int rows = randomIntBetween(50, 100); @@ -947,6 +946,7 @@ public void testRandomMultiValuesTopN() { Set uniqueOrders = new LinkedHashSet<>(sortingByColumns); List>> expectedValues = new ArrayList<>(rows); List blocks = new ArrayList<>(blocksCount); + boolean[] validSortKeys = new boolean[blocksCount]; List elementTypes = new ArrayList<>(blocksCount); List encoders = new ArrayList<>(blocksCount); @@ -960,6 +960,7 @@ public void testRandomMultiValuesTopN() { () -> randomFrom(ElementType.values()) ); elementTypes.add(e); + validSortKeys[type] = true; try (Block.Builder builder = e.newBlockBuilder(rows, driverContext().blockFactory())) { List previousValue = null; Function randomValueSupplier = (blockType) -> randomValue(blockType); @@ -967,23 +968,22 @@ public void testRandomMultiValuesTopN() { if (rarely()) { randomValueSupplier = switch (randomInt(2)) { case 0 -> { - // use the right BytesRef encoder (don't touch the bytes) + // Simulate ips encoders.add(TopNEncoder.IP); - // deal with IP fields (BytesRef block) like ES does and properly encode the ip addresses yield (blockType) -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); } case 1 -> { - // use the right BytesRef encoder (don't touch the bytes) + // Simulate version fields encoders.add(TopNEncoder.VERSION); - // create a valid Version yield (blockType) -> randomVersion().toBytesRef(); } - default -> { - // use the right BytesRef encoder (don't touch the bytes) + case 2 -> { - // create a valid geo_point + // Simulate geo_shape and geo_point encoders.add(DEFAULT_UNSORTABLE); + validSortKeys[type] = false; yield (blockType) -> randomPointAsWKB(); } + default -> throw new UnsupportedOperationException(); }; } else { encoders.add(UTF8); } @@ -1033,10 +1033,16 @@ public void testRandomMultiValuesTopN() { } } - // simulate the LogicalPlanOptimizer.PruneRedundantSortClauses by eliminating duplicate sorting columns (same column, same asc/desc, - // same "nulls" handling) - while (uniqueOrders.size() < sortingByColumns) { - int column = randomIntBetween(0, blocksCount - 1); + /* + * Build sort keys, making sure not to include duplicates. This could + * build fewer than the desired sort columns, but it's more important + * to make sure that we don't include dups + * (to simulate LogicalPlanOptimizer.PruneRedundantSortClauses) and + * not to include sort keys that simulate geo objects. Those aren't + * sortable at all. + */ + for (int i = 0; i < sortingByColumns; i++) { + int column = randomValueOtherThanMany(c -> false == validSortKeys[c], () -> randomIntBetween(0, blocksCount - 1)); uniqueOrders.add(new TopNOperator.SortOrder(column, randomBoolean(), randomBoolean())); } From 97d0c8c07a1c6a1108aa511d266928f59cc6a219 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 12 Jan 2024 15:36:14 -0500 Subject: [PATCH 28/35] ESQL: Fix error test on windows (#104340) This fixes a test on Windows: the error message contains platform-local line endings because it comes from the JVM. Closes #104296 Closes #104245 --- .../function/scalar/string/ReplaceTests.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java index 60268b9e27764..6c6500bfc333d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -80,7 +80,7 @@ public static Iterable parameters() { ) ); - suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + suppliers.add(new TestCaseSupplier("syntax error", List.of(DataTypes.KEYWORD, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { String text = randomAlphaOfLength(10); String invalidRegex = "["; String newStr = randomAlphaOfLength(5); @@ -94,8 +94,16 @@ public static Iterable parameters() { DataTypes.KEYWORD, equalTo(null) ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^") - .withFoldingException(PatternSyntaxException.class, "Unclosed character class near index 0\n[\n^"); + .withWarning( + "Line -1:-1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^".replaceAll( + "\n", + System.lineSeparator() + ) + ) + .withFoldingException( + PatternSyntaxException.class, + "Unclosed character class near index 0\n[\n^".replaceAll("\n", System.lineSeparator()) + ); })); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, suppliers))); }
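The essence of the fix, as a self-contained sketch (plain JDK; the class and names here are illustrative, not part of the patch): the JVM assembles PatternSyntaxException messages with System.lineSeparator(), so an expected message written with '\n' must be normalized the same way before comparing.

    import java.util.regex.Pattern;
    import java.util.regex.PatternSyntaxException;

    class LineSeparatorSketch {
        public static void main(String[] args) {
            // System.lineSeparator() is "\r\n" on Windows and "\n" elsewhere.
            String expected = "Unclosed character class near index 0\n[\n^"
                .replaceAll("\n", System.lineSeparator());
            try {
                Pattern.compile("[");  // the same invalid regex the test uses
            } catch (PatternSyntaxException e) {
                // Prints true on both Unix and Windows.
                System.out.println(expected.equals(e.getMessage()));
            }
        }
    }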
From 3da01e0a1690bb1c304b8237da918d4e2a933889 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 12 Jan 2024 15:37:52 -0500 Subject: [PATCH 29/35] ESQL: Fix old version tests (#104333) This weakens an assertion in the ESQL rolling upgrade tests so they'll pass against older versions of Elasticsearch. Apparently the warning message changed. There isn't a good reason to be so strict about the assertion anyway. Closes #104101 --- .../xpack/restart/FullClusterRestartIT.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 6854035281670..4234c8e7913ba 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; @@ -996,7 +995,6 @@ public void testDataStreams() throws Exception { /** * Tests that a single document survives. Super basic smoke test.
*/ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104101") public void testDisableFieldNameField() throws IOException { assumeTrue("can only disable field names field before 8.0", Version.fromString(getOldClusterVersion()).before(Version.V_8_0_0)); String docLocation = "/nofnf/_doc/1"; @@ -1023,10 +1021,11 @@ public void testDisableFieldNameField() throws IOException { } } }"""); - createIndex.setOptions( - RequestOptions.DEFAULT.toBuilder() - .setWarningsHandler(warnings -> false == warnings.equals(List.of(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE))) - ); + createIndex.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> switch (warnings.size()) { + case 0 -> false; // old versions don't return a warning + case 1 -> false == warnings.get(0).contains("_field_names"); + default -> true; + })); client().performRequest(createIndex); Request createDoc = new Request("PUT", docLocation); From 63b3e66fdf30fe57c2a433848bdef425d850ed2b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 12 Jan 2024 13:46:05 -0800 Subject: [PATCH 30/35] AwaitsFix #104343 --- .../allocation/allocator/DesiredBalanceComputerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index 9fe168074f41e..1c2b35fe050f5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -581,6 +581,7 @@ public void testAppliesMoveCommands() { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104343") public void testDesiredBalanceShouldConvergeInABigCluster() { var nodes = randomIntBetween(3, 7); var nodeIds = new ArrayList(nodes); From 149f4a1376819fc01107fcbe5ed9ad5278051bf3 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 12 Jan 2024 14:48:01 -0800 Subject: [PATCH 31/35] AwaitsFix #103108 --- .../xpack/ml/integration/MlDistributedFailureIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 33fd7c108863b..942729bb81c64 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -541,6 +541,7 @@ public void testClusterWithTwoMlNodes_RunsDatafeed_GivenOriginalNodeGoesDown() t }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103108") public void testClusterWithTwoMlNodes_StopsDatafeed_GivenJobFailsOnReassign() throws Exception { internalCluster().ensureAtMostNumDataNodes(0); logger.info("Starting dedicated master node..."); From 3c6ab3aba6f3c42326229571edb31c195b10579a Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Sat, 13 Jan 2024 12:49:39 +0100 Subject: [PATCH 32/35] Increase stateless refresh thread pool (#104332) The refresh thread pool is sized too aggressively low for current state of stateless, so increasing it. 
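A rough sketch of the sizing involved (the exact rounding of the halfProcMaxAt10 helper is an assumption here, not taken from this patch):

    // Before: refresh threads were capped at half the allocated processors,
    // at most 10. After: stateless nodes get one thread per allocated processor.
    static int refreshPoolMax(boolean stateless, int allocatedProcessors) {
        int halfProcMaxAt10 = Math.min(10, Math.max(1, allocatedProcessors / 2));
        return stateless ? allocatedProcessors : halfProcMaxAt10;
    }

So a stateless node with 32 allocated processors moves from at most 10 refresh threads to 32.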
Relates ES-7633 and ES-7631 --- .../main/java/org/elasticsearch/threadpool/ThreadPool.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index bfcd8c8a396f5..17cafaee19bb4 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -228,7 +229,9 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui new ScalingExecutorBuilder(Names.MANAGEMENT, 1, boundedBy(allocatedProcessors, 1, 5), TimeValue.timeValueMinutes(5), false) ); builders.put(Names.FLUSH, new ScalingExecutorBuilder(Names.FLUSH, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5), false)); - builders.put(Names.REFRESH, new ScalingExecutorBuilder(Names.REFRESH, 1, halfProcMaxAt10, TimeValue.timeValueMinutes(5), false)); + // TODO: remove (or refine) this temporary stateless custom refresh pool sizing once ES-7631 is solved. + final int refreshThreads = DiscoveryNode.isStateless(settings) ? allocatedProcessors : halfProcMaxAt10; + builders.put(Names.REFRESH, new ScalingExecutorBuilder(Names.REFRESH, 1, refreshThreads, TimeValue.timeValueMinutes(5), false)); builders.put(Names.WARMER, new ScalingExecutorBuilder(Names.WARMER, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5), false)); final int maxSnapshotCores = getMaxSnapshotThreadPoolSize(allocatedProcessors); builders.put(Names.SNAPSHOT, new ScalingExecutorBuilder(Names.SNAPSHOT, 1, maxSnapshotCores, TimeValue.timeValueMinutes(5), false)); From 87e6b206c021d6153a89f7f8d1e844b1f3f5441b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Sat, 13 Jan 2024 10:26:44 -0800 Subject: [PATCH 33/35] AwaitsFix #104348 --- .../resources/rest-api-spec/test/data_stream/10_basic.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index f5837f6d8c286..22b541425b74f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -538,8 +538,10 @@ setup: --- "Delete data stream with failure stores": - skip: - version: " - 8.11.99" - reason: "data streams only supported in 8.12+" + # version: " - 8.11.99" + # reason: "data streams only supported in 8.12+" + version: all + reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/104348 - do: allowed_warnings: From ba3d9c6de46331f52f75a9212d609dd530d3a71b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Sat, 13 Jan 2024 10:34:49 -0800 Subject: [PATCH 34/35] AwaitsFix #104349 --- .../org/elasticsearch/index/shard/ShardSplittingQueryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 851ad18500add..0895d680046c9 100644 --- 
a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -68,6 +68,7 @@ public void testSplitOnID() throws IOException { dir.close(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104349") public void testSplitOnRouting() throws IOException { SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); Directory dir = newFSDirectory(createTempDir()); From 4d7e0ec1ef195480a7dde39a9876ecde875b1bdc Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 13 Jan 2024 15:03:00 -0800 Subject: [PATCH 35/35] Bump min target page size to 32 (#104335) The current MIN_TARGET_PAGE_SIZE is set to 10, which may be too low. I think most of the optimizations in ESQL are focused on processing rows rather than pages. The overhead of processing many pages can be significant in some cases. For instance, the execution time of HeapAttackIT#testGroupOnManyLongs decreased from 52 seconds to 28 seconds when I increased MIN_TARGET_PAGE_SIZE from 10 to 32. Therefore, I propose raising the MIN_TARGET_PAGE_SIZE to 32. --- .../main/java/org/elasticsearch/compute/operator/Operator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java index 63dbdf2be09bf..fd6589bf5a913 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Operator.java @@ -39,7 +39,7 @@ public interface Operator extends Releasable { * non-trivial overhead and it's just not worth building even * smaller blocks under normal circumstances. */ - int MIN_TARGET_PAGE_SIZE = 10; + int MIN_TARGET_PAGE_SIZE = 32; /** * whether the given operator can accept more input pages
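To make the trade-off behind this bump concrete, a back-of-the-envelope sketch (the figures are illustrative assumptions, not measurements from the patch): with a fixed amount of bookkeeping per page, a million rows in pages of 10 means 100,000 pages, while pages of 32 mean roughly 31,250, cutting pure per-page overhead by about a factor of three.

    // Hypothetical helper showing how a floor like MIN_TARGET_PAGE_SIZE is used:
    // tiny requested page sizes are clamped up to the floor rather than emitting
    // many near-empty pages, each of which carries fixed bookkeeping cost.
    static int targetPageSize(int requested) {
        return Math.max(32 /* MIN_TARGET_PAGE_SIZE */, requested);
    }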